@sandipan1 · Created August 28, 2019
Syncing frame and color values: the red/green/blue seek-bar values are packaged as Int32 packets and pushed into the MediaPipe hair-segmentation graph's "red", "green", and "blue" input streams, stamped with each frame's timestamp so the color values stay aligned with the video frames.
Log captured with: adb logcat -s native:* tflite:* DEBUG:* Adreno:* MainActivity:* AndroidRuntime:* WindowManager:* ExternalTextureConv:* FrameProcessor:*

08-28 17:03:35.998 23208 23208 I Adreno : QUALCOMM build : 84c9b58, I4fcced3c6d
08-28 17:03:35.998 23208 23208 I Adreno : Build Date : 01/17/18
08-28 17:03:35.998 23208 23208 I Adreno : OpenGL ES Shader Compiler Version: XE031.09.00.04
08-28 17:03:35.998 23208 23208 I Adreno : Local Branch :
08-28 17:03:35.998 23208 23208 I Adreno : Remote Branch : refs/tags/AU_LINUX_ANDROID_LA.UM.5.5.R1.07.01.02.269.061
08-28 17:03:35.998 23208 23208 I Adreno : Remote Branch : NONE
08-28 17:03:35.998 23208 23208 I Adreno : Reconstruct Branch : NOTHING
08-28 17:03:36.849 23208 23229 D ExternalTextureConv: Created output texture: 2 width: 1135 height: 1513
08-28 17:03:36.881 23208 23229 D ExternalTextureConv: Created output texture: 3 width: 1135 height: 1513
08-28 17:03:36.950 23208 23228 I tflite : Initialized TensorFlow Lite runtime.
08-28 17:03:36.952 23208 23228 I tflite : Created TensorFlow Lite delegate for GPU.
08-28 17:03:45.951 1555 1555 W WindowManager: Attempted to remove non-existing token: android.os.Binder@df28a2d
08-28 17:03:58.082 1555 1555 W WindowManager: Attempted to remove non-existing token: android.os.Binder@187acdc
package com.google.mediapipe.apps.hairsegmentationgpu;
import android.graphics.SurfaceTexture;
import android.os.Bundle;
import android.util.Size;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.widget.SeekBar;
import android.widget.Toast;
import android.util.Log;
import androidx.appcompat.app.AppCompatActivity;
import com.google.mediapipe.components.CameraHelper;
import com.google.mediapipe.components.CameraXPreviewHelper;
import com.google.mediapipe.components.ExternalTextureConverter;
import com.google.mediapipe.components.TextureFrameConsumer;
import com.google.mediapipe.framework.TextureFrame;
import com.google.mediapipe.components.FrameProcessor;
import com.google.mediapipe.components.PermissionHelper;
import com.google.mediapipe.framework.AndroidAssetUtil;
import com.google.mediapipe.glutil.EglManager;
import com.google.mediapipe.framework.Packet;
import com.google.mediapipe.framework.PacketCreator;
import com.google.mediapipe.framework.AndroidPacketCreator;
/**
* Main activity of MediaPipe example apps.
*/
public class MainActivity extends AppCompatActivity {
  private static final String TAG = "MainActivity";
  private static final String BINARY_GRAPH_NAME = "hairsegmentationgpu.binarypb";
  private static final String INPUT_VIDEO_STREAM_NAME = "input_video";
  private static final String OUTPUT_VIDEO_STREAM_NAME = "output_video";
  private static final String RED_INPUT_STREAM = "red";
  private static final String GREEN_INPUT_STREAM = "green";
  private static final String BLUE_INPUT_STREAM = "blue";
  private static final CameraHelper.CameraFacing CAMERA_FACING = CameraHelper.CameraFacing.FRONT;
  private static final boolean FLIP_FRAMES_VERTICALLY = true;

  protected int red_progress = 0;
  protected int green_progress = 0;
  protected int blue_progress = 0;
  public Packet red_packet;
  public Packet green_packet;
  public Packet blue_packet;
  SeekBar red_seekBar;
  SeekBar green_seekBar;
  SeekBar blue_seekBar;

  static {
    // Load all native libraries needed by the app.
    System.loadLibrary("mediapipe_jni");
    System.loadLibrary("opencv_java4");
  }

  // {@link SurfaceTexture} where the camera-preview frames can be accessed.
  private SurfaceTexture previewFrameTexture;
  // {@link SurfaceView} that displays the camera-preview frames processed by a MediaPipe graph.
  private SurfaceView previewDisplayView;
  // Creates and manages an {@link EGLContext}.
  private EglManager eglManager;
  // Sends camera-preview frames into a MediaPipe graph for processing, and displays the processed
  // frames onto a {@link Surface}.
  private FrameProcessor processor;
  // Converts the GL_TEXTURE_EXTERNAL_OES texture from the Android camera into a regular texture
  // to be consumed by {@link FrameProcessor} and the underlying MediaPipe graph.
  private ExternalTextureConverter converter;
  // Handles camera access via the {@link CameraX} Jetpack support library.
  private CameraXPreviewHelper cameraHelper;
  // Sends the current RGB seek-bar values into the graph, synced to each frame's timestamp.
  private RGBHandler rgbHandler;
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    red_seekBar = (SeekBar) findViewById(R.id.red_seekbar);
    green_seekBar = (SeekBar) findViewById(R.id.green_seekbar);
    blue_seekBar = (SeekBar) findViewById(R.id.blue_seekbar);
    red_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
      @Override
      public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
        red_progress = i;
      }

      @Override
      public void onStartTrackingTouch(SeekBar seekBar) {}

      @Override
      public void onStopTrackingTouch(SeekBar seekBar) {
        Toast.makeText(MainActivity.this, "Red seek bar progress: " + red_progress,
            Toast.LENGTH_SHORT).show();
      }
    });
    green_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
      @Override
      public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
        green_progress = i;
      }

      @Override
      public void onStartTrackingTouch(SeekBar seekBar) {}

      @Override
      public void onStopTrackingTouch(SeekBar seekBar) {
        Toast.makeText(MainActivity.this, "Green seek bar progress: " + green_progress,
            Toast.LENGTH_SHORT).show();
      }
    });
    blue_seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
      @Override
      public void onProgressChanged(SeekBar seekBar, int i, boolean b) {
        blue_progress = i;
      }

      @Override
      public void onStartTrackingTouch(SeekBar seekBar) {}

      @Override
      public void onStopTrackingTouch(SeekBar seekBar) {
        Toast.makeText(MainActivity.this, "Blue seek bar progress: " + blue_progress,
            Toast.LENGTH_SHORT).show();
      }
    });

    previewDisplayView = new SurfaceView(this);
    setupPreviewDisplayView();

    // Initialize the asset manager so that MediaPipe native libraries can access the app assets,
    // e.g., binary graphs.
    AndroidAssetUtil.initializeNativeAssetManager(this);

    // Earlier attempt, kept for reference: creating the packets with a standalone PacketCreator.
    // packetcreator = new PacketCreator();
    // red_packet = packetcreator.createInt32(red_progress);
    // green_packet = packetcreator.createInt32(green_progress);
    // blue_packet = packetcreator.createInt32(blue_progress);

    eglManager = new EglManager(null);
    processor =
        new FrameProcessor(
            this,
            eglManager.getNativeContext(),
            BINARY_GRAPH_NAME,
            INPUT_VIDEO_STREAM_NAME,
            OUTPUT_VIDEO_STREAM_NAME);
    rgbHandler = new RGBHandler();
    processor.setConsumer(rgbHandler);
    processor.getVideoSurfaceOutput().setFlipY(FLIP_FRAMES_VERTICALLY);
    PermissionHelper.checkAndRequestCameraPermissions(this);

    // Earlier attempts, kept for reference: pushing the color packets once at startup with
    // wall-clock timestamps, and passing the values as input side packets. Neither stays in
    // sync with the frame timestamps, which is why the packets are now sent per frame from
    // RGBHandler.onNewFrame() below.
    // Log.d(TAG, "sending 1st packets");
    // red_packet = processor.getPacketCreator().createInt32(red_progress);
    // green_packet = processor.getPacketCreator().createInt32(green_progress);
    // blue_packet = processor.getPacketCreator().createInt32(blue_progress);
    // processor.getGraph().addConsumablePacketToInputStream(RED_INPUT_STREAM,
    //     red_packet, System.currentTimeMillis());
    // processor.getGraph().addConsumablePacketToInputStream(GREEN_INPUT_STREAM,
    //     green_packet, System.currentTimeMillis());
    // processor.getGraph().addConsumablePacketToInputStream(BLUE_INPUT_STREAM,
    //     blue_packet, System.currentTimeMillis());
    // Log.d(TAG, "packets sent ... releasing packets");
    // red_packet.release();
    // green_packet.release();
    // blue_packet.release();
    // processor.setInputSidePackets("rgb_reference", t);
    // processor.setInputSidePackets("green_value", green_progress);
    // processor.setInputSidePackets("blue_value", blue_progress);
  }
  @Override
  protected void onResume() {
    super.onResume();
    converter = new ExternalTextureConverter(eglManager.getContext());
    converter.setFlipY(FLIP_FRAMES_VERTICALLY);
    converter.setConsumer(processor);
    if (PermissionHelper.cameraPermissionsGranted(this)) {
      startCamera();
    }
  }

  @Override
  protected void onPause() {
    super.onPause();
    converter.close();
  }

  @Override
  public void onRequestPermissionsResult(
      int requestCode, String[] permissions, int[] grantResults) {
    super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    PermissionHelper.onRequestPermissionsResult(requestCode, permissions, grantResults);
  }
  private void setupPreviewDisplayView() {
    previewDisplayView.setVisibility(View.GONE);
    ViewGroup viewGroup = findViewById(R.id.preview_display_layout);
    viewGroup.addView(previewDisplayView);
    previewDisplayView
        .getHolder()
        .addCallback(
            new SurfaceHolder.Callback() {
              @Override
              public void surfaceCreated(SurfaceHolder holder) {
                processor.getVideoSurfaceOutput().setSurface(holder.getSurface());
              }

              @Override
              public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
                // (Re-)Compute the ideal size of the camera-preview display (the area that the
                // camera-preview frames get rendered onto, potentially with scaling and rotation)
                // based on the size of the SurfaceView that contains the display.
                Size viewSize = new Size(width, height);
                Size displaySize = cameraHelper.computeDisplaySizeFromViewSize(viewSize);
                // Connect the converter to the camera-preview frames as its input (via
                // previewFrameTexture), and configure the output width and height as the computed
                // display size.
                converter.setSurfaceTextureAndAttachToGLContext(
                    previewFrameTexture, displaySize.getWidth(), displaySize.getHeight());
              }

              @Override
              public void surfaceDestroyed(SurfaceHolder holder) {
                processor.getVideoSurfaceOutput().setSurface(null);
              }
            });
  }
  private void startCamera() {
    cameraHelper = new CameraXPreviewHelper();
    cameraHelper.setOnCameraStartedListener(
        surfaceTexture -> {
          previewFrameTexture = surfaceTexture;
          // Make the display view visible to start showing the preview. This triggers the
          // SurfaceHolder.Callback added to (the holder of) previewDisplayView.
          previewDisplayView.setVisibility(View.VISIBLE);
        });
    cameraHelper.startCamera(this, CAMERA_FACING, /*surfaceTexture=*/ null);
  }
  // Consumes each frame coming out of the processor and pushes the current RGB seek-bar values
  // into the graph as Int32 packets stamped with that frame's timestamp, so the color values
  // stay in sync with the video stream.
  private class RGBHandler implements TextureFrameConsumer {
    @Override
    public void onNewFrame(TextureFrame frame) {
      long sync_time = frame.getTimestamp();
      // Send the color packets to the graph with the same timestamp as the frame.
      red_packet = processor.getPacketCreator().createInt32(red_progress);
      green_packet = processor.getPacketCreator().createInt32(green_progress);
      blue_packet = processor.getPacketCreator().createInt32(blue_progress);
      processor.getGraph().addConsumablePacketToInputStream(
          RED_INPUT_STREAM, red_packet, sync_time);
      processor.getGraph().addConsumablePacketToInputStream(
          GREEN_INPUT_STREAM, green_packet, sync_time);
      processor.getGraph().addConsumablePacketToInputStream(
          BLUE_INPUT_STREAM, blue_packet, sync_time);
      red_packet.release();
      green_packet.release();
      blue_packet.release();
      frame.release();
    }
  }
}
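For comparison, here is a minimal sketch of an alternative hookup that injects the color packets just before each camera frame enters the graph, rather than when the processed frame comes out, so the packets carry exactly the timestamp of the corresponding input frame. This is not the gist's code: it assumes the MediaPipe Android framework in your checkout exposes FrameProcessor.setOnWillAddFrameListener (present in newer revisions of the framework); if it does not, the RGBHandler approach above still applies.

    // Hypothetical alternative, assuming FrameProcessor.setOnWillAddFrameListener is available
    // in this MediaPipe revision: inject the color packets right before each camera frame is
    // added to the graph, so input frame and color packets share one timestamp.
    processor.setOnWillAddFrameListener(
        timestamp -> {
          Packet red = processor.getPacketCreator().createInt32(red_progress);
          Packet green = processor.getPacketCreator().createInt32(green_progress);
          Packet blue = processor.getPacketCreator().createInt32(blue_progress);
          processor.getGraph().addConsumablePacketToInputStream(RED_INPUT_STREAM, red, timestamp);
          processor.getGraph().addConsumablePacketToInputStream(GREEN_INPUT_STREAM, green, timestamp);
          processor.getGraph().addConsumablePacketToInputStream(BLUE_INPUT_STREAM, blue, timestamp);
          // The graph consumes the packets; release the local references.
          red.release();
          green.release();
          blue.release();
        });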