@siralam
Last active June 23, 2024 05:30
Zoomable Camera2Preview, but it cannot regain focus on the 2nd capture after zooming.
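For context, here is a minimal usage sketch from a hosting Activity (the PreviewActivity name is hypothetical, and the runtime CAMERA permission is assumed to be granted before it starts); resume(), pause(), takePicture() and the listener setters below are the public entry points of the class:

    import android.app.Activity;
    import android.os.Bundle;

    // Minimal usage sketch; not part of the gist itself.
    public class PreviewActivity extends Activity {
        private Camera2Preview preview;

        @Override
        protected void onCreate(Bundle savedInstanceState) {
            super.onCreate(savedInstanceState);
            preview = new Camera2Preview(this, 0.75f); // 3:4 preview/picture aspect ratio
            setContentView(preview);
            preview.setOnPictureTakenListener(new Camera2Preview.OnPictureTakenListener() {
                @Override
                public void onPictureTaken(byte[] data) {
                    // JPEG bytes from the ImageReader; save or display them here.
                }
            });
            // Call preview.takePicture() from e.g. a shutter button to trigger a capture.
        }

        @Override
        protected void onResume() {
            super.onResume();
            preview.resume(); // starts the background thread and opens the camera
        }

        @Override
        protected void onPause() {
            preview.pause(); // closes the camera, then stops the background thread
            super.onPause();
        }
    }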
package cameraview.com.cameraview;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.Matrix;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.support.annotation.NonNull;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.MotionEvent;
import android.view.Surface;
import android.view.TextureView;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
/**
* The Camera Preview, using the Camera2 API.
* I would say Camera2 is genuinely hard to understand and implement by yourself.
* Most of the code here is copied from https://github.com/googlesamples/android-Camera2Basic,
* with front/back camera switching and a configurable aspect ratio added on top.
*/
@SuppressLint("NewApi")
public class Camera2Preview extends TextureView {
public static final String TAG = "Camera2Preview";
public static final int BACK_CAMERA = CameraCharacteristics.LENS_FACING_BACK;
public static final int FRONT_CAMERA = CameraCharacteristics.LENS_FACING_FRONT;
protected Context context;
protected float aspectRatio = 0.75f;
private static final float aspectRatioThreshold = 0.05f;
private String cameraId;
private int facing = BACK_CAMERA;
protected CameraDevice cameraDevice;
protected CameraCharacteristics cameraCharacteristics;
protected CameraCaptureSession captureSession;
protected CaptureRequest previewRequest;
protected CaptureRequest.Builder previewRequestBuilder;
protected Size previewSize;
protected int sensorOrientation;
//Lock
private Semaphore cameraOpenCloseLock = new Semaphore(1);
private int state = STATE_PREVIEW;
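// States of the capture state machine driven by captureCallback: showing preview, waiting for
// the AF lock, waiting for the AE precapture sequence to start, waiting for it to finish, and
// picture taken.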
private static final int STATE_PREVIEW = 0;
private static final int STATE_WAITING_LOCK = 1;
private static final int STATE_WAITING_PRECAPTURE = 2;
private static final int STATE_WAITING_NON_PRECAPTURE = 3;
private static final int STATE_PICTURE_TAKEN = 4;
//Zooming
public float fingerSpacing = 0;
public float zoomLevel = 1f;
public float maximumZoomLevel;
public Rect zoom;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private ImageReader imageReader;
private Handler backgroundHandler;
private HandlerThread backgroundThread;
protected OnPictureTakenListener onPictureTakenListener;
protected CameraView.OnCameraErrorListener cameraErrorCallback;
protected CameraView.OnFocusLockedListener focusLockedCallback;
private final TextureView.SurfaceTextureListener surfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
openCamera(width, height);
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
// This method is called when the camera is opened. We start camera preview here.
cameraOpenCloseLock.release();
Camera2Preview.this.cameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
cameraOpenCloseLock.release();
cameraDevice.close();
Camera2Preview.this.cameraDevice = null;
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
cameraOpenCloseLock.release();
cameraDevice.close();
Camera2Preview.this.cameraDevice = null;
}
};
private final ImageReader.OnImageAvailableListener onImageAvailableListener
= new ImageReader.OnImageAvailableListener() {
@SuppressLint("NewApi")
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
if (image == null || onPictureTakenListener == null) return;
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.remaining()]; // remaining() is the actual JPEG payload size
buffer.get(bytes);
onPictureTakenListener.onPictureTaken(bytes);
} catch (final Exception e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
} finally {
// Always close the Image, even on error, so the ImageReader does not run out of buffers.
if (image != null) image.close();
}
}
};
private CameraCaptureSession.CaptureCallback captureCallback
= new CameraCaptureSession.CaptureCallback() {
private void process(CaptureResult result) {
switch (state) {
case STATE_PREVIEW: {
// We have nothing to do when the camera preview is working normally.
break;
}
case STATE_WAITING_LOCK: {
Integer afState = result.get(CaptureResult.CONTROL_AF_STATE);
Log.i("cameraFocus", "" + afState);
if (afState == null) {
captureStillPicture();
} else if (CaptureResult.CONTROL_AF_STATE_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_NOT_FOCUSED_LOCKED == afState ||
CaptureResult.CONTROL_AF_STATE_INACTIVE == afState /*add this*/) {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_CONVERGED) {
state = STATE_PICTURE_TAKEN;
captureStillPicture();
} else {
runPrecaptureSequence();
}
}
break;
}
case STATE_WAITING_PRECAPTURE: {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null ||
aeState == CaptureResult.CONTROL_AE_STATE_PRECAPTURE ||
aeState == CaptureResult.CONTROL_AE_STATE_FLASH_REQUIRED) {
state = STATE_WAITING_NON_PRECAPTURE;
}
break;
}
case STATE_WAITING_NON_PRECAPTURE: {
// CONTROL_AE_STATE can be null on some devices
Integer aeState = result.get(CaptureResult.CONTROL_AE_STATE);
if (aeState == null || aeState != CaptureResult.CONTROL_AE_STATE_PRECAPTURE) {
state = STATE_PICTURE_TAKEN;
captureStillPicture();
}
break;
}
}
}
@Override
public void onCaptureProgressed(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull CaptureResult partialResult) {
process(partialResult);
}
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
process(result);
}
};
public Camera2Preview(Context context) {
super(context);
init(context);
}
public Camera2Preview(Context context, float aspectRatio) {
super(context);
this.aspectRatio = aspectRatio;
init(context);
}
public Camera2Preview(Context context, AttributeSet attrs) {
super(context, attrs);
init(context);
}
public Camera2Preview(Context context, AttributeSet attrs, int defStyleAttr) {
super(context, attrs, defStyleAttr);
init(context);
}
private void init(Context context) {
this.context = context;
}
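/**
* Chooses the smallest output size that is at least as large as the TextureView, does not
* exceed the given maximum dimensions, and matches the requested aspect ratio. Falls back to
* the largest size that is too small, or to the first choice if nothing matches.
*/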
private static Size chooseOptimalSize(Size[] choices, int textureViewWidth,
int textureViewHeight, int maxWidth, int maxHeight, Size aspectRatio) {
// Collect the supported resolutions that are at least as big as the preview Surface
List<Size> bigEnough = new ArrayList<>();
// Collect the supported resolutions that are smaller than the preview Surface
List<Size> notBigEnough = new ArrayList<>();
int w = aspectRatio.getWidth();
int h = aspectRatio.getHeight();
for (Size option : choices) {
if (option.getWidth() <= maxWidth && option.getHeight() <= maxHeight &&
option.getHeight() == option.getWidth() * h / w) {
if (option.getWidth() >= textureViewWidth &&
option.getHeight() >= textureViewHeight) {
bigEnough.add(option);
} else {
notBigEnough.add(option);
}
}
}
// Pick the smallest of those big enough. If there is no one big enough, pick the
// largest of those not big enough.
if (bigEnough.size() > 0) {
return Collections.min(bigEnough, new CompareSizesByArea());
} else if (notBigEnough.size() > 0) {
return Collections.max(notBigEnough, new CompareSizesByArea());
} else {
Log.e(TAG, "Couldn't find any suitable preview size");
return choices[0];
}
}
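/**
* Picks the camera matching the requested lens facing, reads its characteristics (maximum
* digital zoom, sensor orientation), creates the JPEG ImageReader for the largest size that
* matches the requested aspect ratio, and computes the preview size for the given view size.
*/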
private void setUpCameraOutputs(int width, int height) {
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
String cameraId = null;
try {
for (String each : manager.getCameraIdList()) {
if (this.facing == manager.getCameraCharacteristics(each).get(CameraCharacteristics.LENS_FACING)) {
cameraId = each;
break;
}
}
if (cameraId == null) throw new Exception("No correct facing camera is found.");
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
cameraCharacteristics = characteristics;
maximumZoomLevel = (characteristics.get(CameraCharacteristics.SCALER_AVAILABLE_MAX_DIGITAL_ZOOM));
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
throw new Exception("configuration map is null.");
}
// For still image captures, we use the largest available size.
Size largest;
ArrayList<Size> sizes = new ArrayList<>();
for (Size each : map.getOutputSizes(ImageFormat.JPEG)) {
float thisAspect = (float) each.getHeight() / each.getWidth();
if ((Math.abs(thisAspect - aspectRatio)) < aspectRatioThreshold) {
sizes.add(each);
}
}
if (sizes.isEmpty()) throw new Exception("No JPEG output size matches the requested aspect ratio.");
largest = Collections.max(sizes, new CompareSizesByArea());
imageReader = ImageReader.newInstance(largest.getWidth(), largest.getHeight(),
ImageFormat.JPEG, /*maxImages*/3);
imageReader.setOnImageAvailableListener(
onImageAvailableListener, backgroundHandler);
// Find out if we need to swap dimension to get the preview size relative to sensor
// coordinate.
int displayRotation = getDisplay().getRotation();
//noinspection ConstantConditions
sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
boolean swappedDimensions = false;
switch (displayRotation) {
case Surface.ROTATION_0:
case Surface.ROTATION_180:
if (sensorOrientation == 90 || sensorOrientation == 270) {
swappedDimensions = true;
}
break;
case Surface.ROTATION_90:
case Surface.ROTATION_270:
if (sensorOrientation == 0 || sensorOrientation == 180) {
swappedDimensions = true;
}
break;
default:
Log.e(TAG, "Display rotation is invalid: " + displayRotation);
}
Point displaySize = new Point();
getDisplay().getSize(displaySize);
int rotatedPreviewWidth = width;
int rotatedPreviewHeight = height;
int maxPreviewWidth = displaySize.x;
int maxPreviewHeight = displaySize.y;
if (swappedDimensions) {
rotatedPreviewWidth = height;
rotatedPreviewHeight = width;
maxPreviewWidth = displaySize.y;
maxPreviewHeight = displaySize.x;
}
// Danger, W.R.! Attempting to use too large a preview size could exceed the camera
// bus' bandwidth limitation, resulting in gorgeous previews but the storage of
// garbage capture data.
previewSize = chooseOptimalSize(map.getOutputSizes(SurfaceTexture.class),
rotatedPreviewWidth, rotatedPreviewHeight, maxPreviewWidth,
maxPreviewHeight, largest);
this.cameraId = cameraId;
} catch (final Exception e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
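/**
* Sets up the outputs and the TextureView transform for the given view size, then opens the
* camera selected by setUpCameraOutputs(). Guarded by cameraOpenCloseLock with a 2.5-second
* timeout so it cannot race with a close that is still in flight.
*/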
@SuppressLint("MissingPermission")
private void openCamera(int width, int height) {
setUpCameraOutputs(width, height);
configureTransform(width, height);
CameraManager manager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
try {
if (!cameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
manager.openCamera(cameraId, stateCallback, backgroundHandler);
} catch (final Exception e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
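/**
* Closes the capture session, the camera device and the ImageReader, holding
* cameraOpenCloseLock so it cannot race with openCamera().
*/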
private void closeCamera() {
try {
cameraOpenCloseLock.acquire();
if (null != captureSession) {
captureSession.close();
captureSession = null;
}
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
cameraOpenCloseLock.release();
}
}
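/**
* Creates a CameraCaptureSession targeting the TextureView surface and the ImageReader,
* then starts the repeating preview request with continuous-picture autofocus.
*/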
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(previewSize.getWidth(), previewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
previewRequestBuilder
= cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
previewRequestBuilder.addTarget(surface);
// Here, we create a CameraCaptureSession for camera preview.
cameraDevice.createCaptureSession(Arrays.asList(surface, imageReader.getSurface()),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
captureSession = cameraCaptureSession;
try {
// Auto focus should be continuous for camera preview.
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Finally, we start displaying the camera preview.
previewRequest = previewRequestBuilder.build();
captureSession.setRepeatingRequest(previewRequest,
captureCallback, backgroundHandler);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
}
}, null
);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
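/**
* Configures the Matrix transform applied to this TextureView so the preview is scaled and
* rotated correctly for the current display rotation. Should be called after the preview size
* is known and whenever the view size changes.
*/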
private void configureTransform(int viewWidth, int viewHeight) {
if (null == previewSize) {
return;
}
int rotation = getDisplay().getRotation();
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, previewSize.getHeight(), previewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / previewSize.getHeight(),
(float) viewWidth / previewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate(90 * (rotation - 2), centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
setTransform(matrix);
}
public void takePicture() {
lockFocus();
}
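/**
* Step 1 of a still capture: triggers an autofocus lock and moves the state machine to
* STATE_WAITING_LOCK so captureCallback can decide whether a precapture sequence is needed.
*/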
private void lockFocus() {
try {
// This is how to tell the camera to lock focus.
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_START);
// Tell #captureCallback to wait for the lock.
state = STATE_WAITING_LOCK;
captureSession.capture(previewRequestBuilder.build(), captureCallback,
backgroundHandler);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
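/**
* Cancels the autofocus trigger after a still capture and restarts the repeating preview
* request, re-applying the current zoom crop region if any.
*/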
private void unlockFocus() {
try {
// Reset the auto-focus trigger
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_TRIGGER,
CameraMetadata.CONTROL_AF_TRIGGER_CANCEL);
captureSession.capture(previewRequestBuilder.build(), captureCallback,
backgroundHandler);
// After this, the camera will go back to the normal state of preview.
state = STATE_PREVIEW;
//resume Zoom effect after taking a picture
previewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
if (zoom != null) previewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
captureSession.setRepeatingRequest(previewRequestBuilder.build(), captureCallback,
backgroundHandler);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
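/**
* Triggers the auto-exposure precapture sequence (e.g. for flash metering) and lets
* captureCallback wait for it to finish before the still capture is issued.
*/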
private void runPrecaptureSequence() {
try {
// This is how to tell the camera to trigger.
previewRequestBuilder.set(CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER,
CaptureRequest.CONTROL_AE_PRECAPTURE_TRIGGER_START);
// Tell #captureCallback to wait for the precapture sequence to be set.
state = STATE_WAITING_PRECAPTURE;
captureSession.capture(previewRequestBuilder.build(), captureCallback,
backgroundHandler);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
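/**
* Issues the actual still capture request to the ImageReader surface, carrying over the JPEG
* orientation and the current zoom crop region, and unlocks focus once the capture completes.
*/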
private void captureStillPicture() {
try {
if (null == cameraDevice) {
return;
}
// This is the CaptureRequest.Builder that we use to take a picture.
final CaptureRequest.Builder captureBuilder =
cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(imageReader.getSurface());
// Use the same AE and AF modes as the preview.
captureBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// Orientation
int rotation = getDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, getOrientation(rotation));
//Zoom
if (zoom != null) {
captureBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
}
CameraCaptureSession.CaptureCallback captureCallback
= new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session,
@NonNull CaptureRequest request,
@NonNull TotalCaptureResult result) {
unlockFocus();
}
};
captureSession.stopRepeating();
captureSession.abortCaptures();
if (focusLockedCallback != null) {
post(new Runnable() {
@Override
public void run() {
focusLockedCallback.onFocusLocked();
}
});
}
captureSession.capture(captureBuilder.build(), captureCallback, null);
} catch (final CameraAccessException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
private int getOrientation(int rotation) {
// Sensor orientation is 90 for most devices, or 270 for some devices (eg. Nexus 5X)
// We have to take that into account and rotate JPEG properly.
// For devices with orientation of 90, we simply return our mapping from ORIENTATIONS.
// For devices with orientation of 270, we need to rotate the JPEG 180 degrees.
return (ORIENTATIONS.get(rotation) + sensorOrientation + 270) % 360;
}
static class CompareSizesByArea implements Comparator<Size> {
@Override
public int compare(Size lhs, Size rhs) {
// We cast here to ensure the multiplications won't overflow
return Long.signum((long) lhs.getWidth() * lhs.getHeight() -
(long) rhs.getWidth() * rhs.getHeight());
}
}
// @SuppressLint("NewApi")
// private int getJpegOrientation(CameraCharacteristics characteristics, int deviceOrientation) {
// if (deviceOrientation == android.view.OrientationEventListener.ORIENTATION_UNKNOWN)
// return 0;
// int sensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
//
// // Round device orientation to a multiple of 90
// deviceOrientation = (deviceOrientation + 45) / 90 * 90;
//
// // Reverse device orientation for front-facing cameras
// boolean facingFront = characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT;
// if (facingFront) deviceOrientation = -deviceOrientation;
//
// // Calculate desired JPEG orientation relative to camera orientation to make
// // the image upright relative to the device orientation
// int jpegOrientation = (sensorOrientation + deviceOrientation + 360) % 360;
//
// return jpegOrientation;
// }
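/** Starts the HandlerThread whose Handler receives camera and capture callbacks. */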
protected void startBackgroundThread() {
backgroundThread = new HandlerThread("Camera2 Background");
backgroundThread.start();
backgroundHandler = new Handler(backgroundThread.getLooper());
}
@SuppressLint("NewApi")
protected void stopBackgroundThread() {
if (backgroundThread == null) {
backgroundHandler = null;
return;
}
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
backgroundHandler = null;
} catch (final InterruptedException e) {
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
}
}
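/**
* Call from the host's onResume(): resets zoom, starts the background thread and opens the
* camera, or waits for the SurfaceTexture to become available first.
*/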
public void resume() {
zoom = null;
startBackgroundThread();
if (isAvailable()) {
openCamera(getWidth(), getHeight());
} else {
setSurfaceTextureListener(surfaceTextureListener);
}
}
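/** Call from the host's onPause(): closes the camera, then stops the background thread. */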
public void pause() {
closeCamera();
stopBackgroundThread();
}
public void setOnPictureTakenListener(OnPictureTakenListener callback) {
this.onPictureTakenListener = callback;
}
public void setFacing(boolean isTurningToFront) {
facing = isTurningToFront ? FRONT_CAMERA : BACK_CAMERA;
pause();
resume();
}
public void resetFacing () {
facing = BACK_CAMERA;
}
public void setOnCameraErrorListener(CameraView.OnCameraErrorListener callback) {
cameraErrorCallback = callback;
}
public void setOnFocusLockedListener (CameraView.OnFocusLockedListener callback) {
focusLockedCallback = callback;
}
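/**
* Pinch-to-zoom: converts the change in finger spacing into a zoom level between 1x and the
* sensor's maximum digital zoom, and applies it as a SCALER_CROP_REGION on the preview request.
*/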
@Override
public boolean onTouchEvent(MotionEvent event) {
try {
Rect rect = cameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
if (rect == null) return false;
float currentFingerSpacing;
if (event.getPointerCount() == 2) { //Multi touch.
currentFingerSpacing = getFingerSpacing(event);
float delta = 0.05f;
if (fingerSpacing != 0) {
if (currentFingerSpacing > fingerSpacing) {
if ((maximumZoomLevel - zoomLevel) <= delta) {
delta = maximumZoomLevel - zoomLevel;
}
zoomLevel = zoomLevel + delta;
} else if (currentFingerSpacing < fingerSpacing){
if ((zoomLevel - delta) < 1f) {
delta = zoomLevel - 1f;
}
zoomLevel = zoomLevel - delta;
}
float ratio = (float) 1 / zoomLevel;
int croppedWidth = rect.width() - Math.round((float)rect.width() * ratio);
int croppedHeight = rect.height() - Math.round((float)rect.height() * ratio);
zoom = new Rect(croppedWidth/2, croppedHeight/2,
rect.width() - croppedWidth/2, rect.height() - croppedHeight/2);
previewRequestBuilder.set(CaptureRequest.SCALER_CROP_REGION, zoom);
}
fingerSpacing = currentFingerSpacing;
} else { //Single touch point, needs to return true in order to detect one more touch point
fingerSpacing = 0; //Reset so the next pinch does not compare against a stale spacing value
return true;
}
captureSession.setRepeatingRequest(previewRequestBuilder.build(), captureCallback, backgroundHandler);
return true;
} catch (final Exception e) {
if (BuildConfig.DEBUG) e.printStackTrace();
if (cameraErrorCallback != null) {
post(new Runnable() {
@Override
public void run() {
cameraErrorCallback.onCameraError(e);
}
});
}
return true;
}
}
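/** Returns the distance in pixels between the first two pointers of a multi-touch event. */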
private float getFingerSpacing(MotionEvent event) {
float x = event.getX(0) - event.getX(1);
float y = event.getY(0) - event.getY(1);
return (float) Math.sqrt(x * x + y * y);
}
public interface OnPictureTakenListener {
void onPictureTaken(byte[] data);
}
}
@ManuelSchmitzberger

IMO you have to replace

zoom = new Rect(croppedWidth/2, croppedHeight/2, rect.width() - croppedWidth/2, rect.height() - croppedHeight/2);

with

int left = (rect.width() - croppedWidth) / 2;
int top = (rect.height() - croppedHeight) / 2;
zoom = new Rect(
    left,
    top,
    left + croppedWidth,
    top + croppedHeight);

so that it zooms into the center of the screen. Or am I wrong?
