@JBurkeKF
Last active February 8, 2023 06:02
Copy a WebRTC I420Frame, convert it to a Bitmap (requires io.pristine:libjingle)
// TODO: Your own package name here
//package com.kittehface;
// Some work based on http://stackoverflow.com/a/12702836 by rics (http://stackoverflow.com/users/21047/rics)
import android.graphics.Bitmap;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
// TODO: project should include io.pristine:libjingle from Maven
import org.webrtc.VideoRenderer;
import java.nio.ByteBuffer;
public class YuvFrame
{
public int width;
public int height;
public int[] yuvStrides;
public byte[] yPlane;
public byte[] uPlane;
public byte[] vPlane;
public int rotationDegree;
public long timestamp;
private final Object planeLock = new Object();
public static final int PROCESSING_NONE = 0x00;
public static final int PROCESSING_CROP_TO_SQUARE = 0x01;
// Constants for indexing I420Frame information, for readability.
private static final int I420_Y = 0;
private static final int I420_V = 1;
private static final int I420_U = 2;
/**
* Creates a YuvFrame from the provided I420Frame. Does no processing, and uses the current time as a timestamp.
* @param i420Frame Source I420Frame.
*/
@SuppressWarnings("unused")
public YuvFrame( final VideoRenderer.I420Frame i420Frame )
{
fromI420Frame( i420Frame, PROCESSING_NONE, System.nanoTime() );
}
/**
* Creates a YuvFrame from the provided I420Frame. Does any processing indicated, and uses the current time as a timestamp.
* @param i420Frame Source I420Frame.
* @param processingFlags Processing flags, YuvFrame.PROCESSING_NONE for no processing.
*/
@SuppressWarnings("unused")
public YuvFrame( final VideoRenderer.I420Frame i420Frame, final int processingFlags )
{
fromI420Frame( i420Frame, processingFlags, System.nanoTime() );
}
/**
* Creates a YuvFrame from the provided I420Frame. Does any processing indicated, and uses the given timestamp.
* @param i420Frame Source I420Frame.
* @param processingFlags Processing flags, YuvFrame.PROCESSING_NONE for no processing.
* @param timestamp The timestamp to give the frame.
*/
public YuvFrame( final VideoRenderer.I420Frame i420Frame, final int processingFlags, final long timestamp )
{
fromI420Frame( i420Frame, processingFlags, timestamp );
}
/**
* Replaces the data in this YuvFrame with the data from the provided frame. Will create new byte arrays to hold pixel data if necessary,
* or will reuse existing arrays if they're already the correct size.
* @param i420Frame Source I420Frame.
* @param processingFlags Processing flags, YuvFrame.PROCESSING_NONE for no processing.
* @param timestamp The timestamp to give the frame.
*/
public void fromI420Frame( final VideoRenderer.I420Frame i420Frame, final int processingFlags, final long timestamp )
{
synchronized ( planeLock )
{
try
{
// Save timestamp
this.timestamp = timestamp;
// TODO: Check to see if i420Frame.yuvFrame is actually true? Need to find out what the alternative would be.
// Copy YUV stride information
// TODO: There is probably a case where strides makes a difference, so far we haven't run across it.
yuvStrides = new int[i420Frame.yuvStrides.length];
System.arraycopy( i420Frame.yuvStrides, 0, yuvStrides, 0, i420Frame.yuvStrides.length );
// Copy rotation information
rotationDegree = i420Frame.rotationDegree; // Just save rotation info for now, doing actual rotation can wait until per-pixel processing.
// Copy the pixel data, processing as requested.
if ( PROCESSING_CROP_TO_SQUARE == ( processingFlags & PROCESSING_CROP_TO_SQUARE ) )
{
copyPlanesCropped( i420Frame );
}
else
{
copyPlanes( i420Frame );
}
}
catch ( Throwable t )
{
dispose();
}
}
}
public void dispose()
{
yPlane = null;
vPlane = null;
uPlane = null;
}
public boolean hasData()
{
return yPlane != null && vPlane != null && uPlane != null;
}
/**
* Copy the Y, V, and U planes from the source I420Frame.
* Sets width and height.
* @param i420Frame Source frame.
*/
private void copyPlanes( final VideoRenderer.I420Frame i420Frame )
{
synchronized ( planeLock )
{
// Copy the Y, V, and U ByteBuffers to their corresponding byte arrays.
// Existing byte arrays are passed in for possible reuse.
yPlane = copyByteBuffer( yPlane, i420Frame.yuvPlanes[I420_Y] );
vPlane = copyByteBuffer( vPlane, i420Frame.yuvPlanes[I420_V] );
uPlane = copyByteBuffer( uPlane, i420Frame.yuvPlanes[I420_U] );
// Set the width and height of the frame.
width = i420Frame.width;
height = i420Frame.height;
}
}
/**
* Copies the entire contents of a ByteBuffer into a byte array.
* If the byte array exists, and is the correct size, it will be reused.
* If the byte array is null, or isn't properly sized, a new byte array will be created.
* @param dst A byte array to copy the ByteBuffer contents to. Can be null.
* @param src A ByteBuffer to copy data from.
* @return A byte array containing the contents of the ByteBuffer. If the provided dst was non-null and the correct size,
* it will be returned. If not, a new byte array will be created.
*/
private byte[] copyByteBuffer( @Nullable byte[] dst, @NonNull final ByteBuffer src )
{
// Create a new byte array if necessary.
byte[] out;
if ( ( null == dst ) || ( dst.length != src.capacity() ) )
{
out = new byte[ src.capacity() ];
}
else
{
out = dst;
}
// Copy the ByteBuffer's contents to the byte array.
src.get( out );
return out;
}
/**
* Copy the Y, V, and U planes from the source I420Frame, cropping them to square.
* Sets width and height.
* @param i420Frame Source frame.
*/
private void copyPlanesCropped( final VideoRenderer.I420Frame i420Frame )
{
synchronized ( planeLock )
{
// Verify that the dimensions of the I420Frame are appropriate for cropping
// If improper dimensions are found, default back to copying the entire frame.
final int width = i420Frame.width;
final int height = i420Frame.height;
if ( width > height )
{
// Calculate the size of the cropped portion of the image
// The cropped width must be divisible by 4, since it will be divided by 2 to crop the center of the frame,
// and then divided by two again for processing the U and V planes, as each value there corresponds to
// a 2x2 square of pixels. All of those measurements must be whole integers.
final int cropWidth = width - height;
if ( ( cropWidth % 4 ) == 0 )
{
// Create a row buffer for the crop method to use - the largest row width will be equal to the source frame's height (since we're cropping to square)
final byte[] row = new byte[height]; // TODO: Create a static row buffer, so this doesn't get created for every frame?
// Copy the Y plane. Existing yPlane is passed in for possible reuse if it's the same size.
yPlane = cropByteBuffer( yPlane, i420Frame.yuvPlanes[I420_Y], width, height, row );
// Copy/crop the U and V planes. The U and V planes' width and height will be half that of the Y plane's.
// The same row buffer can be reused, since being oversize isn't an issue.
vPlane = cropByteBuffer( vPlane, i420Frame.yuvPlanes[I420_V], width / 2, height / 2, row );
uPlane = cropByteBuffer( uPlane, i420Frame.yuvPlanes[I420_U], width / 2, height / 2, row );
// Set size
// noinspection SuspiciousNameCombination (Shut up, Lint, I know what I'm doing.)
this.width = height;
this.height = height;
}
else
{
copyPlanes( i420Frame );
}
}
else
{
// Calculate the size of the cropped portion of the image
// The cropped height must be divisible by 4, since it will be divided by 2 to crop the center of the frame,
// and then divided by two again for processing the U and V planes, as each value there corresponds to
// a 2x2 square of pixels. All of those measurements must be whole integers.
final int cropHeight = height - width;
if ( ( cropHeight % 4 ) == 0 )
{
// Copy the Y plane. (No row buffer is needed if height >= width.)
yPlane = cropByteBuffer( yPlane, i420Frame.yuvPlanes[I420_Y], width, height, null );
// Copy/crop the U and V planes. The U and V planes' width and height will be half that of the Y plane's.
// No row buffer is needed here either, since whole rows are copied when cropping the top and bottom.
vPlane = cropByteBuffer( vPlane, i420Frame.yuvPlanes[I420_V], width / 2, height / 2, null );
uPlane = cropByteBuffer( uPlane, i420Frame.yuvPlanes[I420_U], width / 2, height / 2, null );
// Set size
// noinspection SuspiciousNameCombination (Shut up, Lint, I know what I'm doing.)
this.height = width;
this.width = width;
}
else
{
copyPlanes( i420Frame );
}
}
}
}
/**
* Copies the contents of a ByteBuffer into a byte array, cropping the center of the image to square.
* If the byte array exists, and is the correct size, it will be reused.
* If the byte array is null, or isn't properly sized, a new byte array will be created.
* @param dst A byte array to copy the ByteBuffer contents to. Can be null.
* @param src A ByteBuffer to copy data from.
* @param srcWidth The width of the source frame.
* @param srcHeight The height of the source frame.
* @param row A byte array with a length equal to or greater than the cropped frame's width, for use as a buffer.
* Can be null. If no row buffer is provided and one is needed, or the buffer is too short, an exception
* will likely result.
* @return A byte array containing the cropped contents of the ByteBuffer. If the provided dst was non-null and the correct size,
* it will be returned. If not, a new byte array will be created.
* @throws NullPointerException If a row buffer is required (srcWidth > srcHeight) but the provided row array is null.
*/
private byte[] cropByteBuffer( @Nullable byte[] dst, @NonNull final ByteBuffer src, final int srcWidth, final int srcHeight, @Nullable final byte[] row )
throws NullPointerException
{
// If the frame is wider than it is tall, copy the center of each row to trim off the left and right edges
if ( srcWidth > srcHeight )
{
// We'll need a row buffer, here. Throw an exception if we don't have one.
if ( null == row )
{
throw new NullPointerException( "YuvFrame.cropByteBuffer: Need row buffer array, and the array provided was null." );
}
// Create a new destination byte array if necessary.
final int croppedSize = srcHeight * srcHeight;
final byte[] out;
if ( ( null == dst ) || ( dst.length != croppedSize ) )
{
out = new byte[croppedSize];
}
else
{
out = dst;
}
// Calculate where on each row to start copying
final int indent = ( srcWidth - srcHeight ) / 2;
// Copy the ByteBuffer
for ( int i = 0; i < srcHeight; i++ )
{
// Set the position of the ByteBuffer to the beginning of the current row,
// adding the calculated indent to trim off the left side.
src.position( ( i * srcWidth ) + indent );
// Copy the cropped row to the row buffer
src.get( row, 0, srcHeight );
// Copy the row buffer to the destination array
System.arraycopy( row, 0, out, i * srcHeight, srcHeight );
}
return out;
}
// If the frame is taller than it is wide, copy the center of the image, cropping off the top and bottom edges.
// NOTE: If the width and height are equal, this method should result in a straight copy of the source ByteBuffer,
// as the calculated row offset will be zero.
else
{
// Create a new destination byte array if necessary.
final int croppedSize = srcWidth * srcWidth;
final byte[] out;
if ( ( null == dst ) || ( dst.length != croppedSize ) )
{
out = new byte[croppedSize];
}
else
{
out = dst;
}
// Calculate where to start reading
final int start = ( ( srcHeight - srcWidth ) / 2 ) * srcWidth; // ((h-w)/2) is the number of rows to skip, multiply by w again to get the starting ByteBuffer position.
// Copy the ByteBuffer
// Since we need to take a sequential series of whole rows, only one copy is necessary
src.position( start );
src.get( out, 0, croppedSize );
return out;
}
}
/**
* Converts this YUV frame to an ARGB_8888 Bitmap. Applies stored rotation.
* Remaining code based on http://stackoverflow.com/a/12702836 by rics (http://stackoverflow.com/users/21047/rics)
* @return A new Bitmap containing the converted frame.
*/
public Bitmap getBitmap()
{
// Calculate the size of the frame
final int size = width * height;
// Allocate an array to hold the ARGB pixel data
final int[] argb = new int[size];
if ( rotationDegree == 90 || rotationDegree == -270 )
{
convertYuvToArgbRot90( argb );
// Create Bitmap from ARGB pixel data.
// noinspection SuspiciousNameCombination (Rotating image swaps width/height, name mismatch is fine, Lint.)
return Bitmap.createBitmap( argb, height, width, Bitmap.Config.ARGB_8888 );
}
else if ( rotationDegree == 180 || rotationDegree == -180 )
{
convertYuvToArgbRot180( argb );
// Create Bitmap from ARGB pixel data.
return Bitmap.createBitmap( argb, width, height, Bitmap.Config.ARGB_8888 );
}
else if ( rotationDegree == 270 || rotationDegree == -90 )
{
convertYuvToArgbRot270( argb );
// Create Bitmap from ARGB pixel data.
// noinspection SuspiciousNameCombination (Rotating image swaps width/height, name mismatch is fine, Lint.)
return Bitmap.createBitmap( argb, height, width, Bitmap.Config.ARGB_8888 );
}
else
{
convertYuvToArgbRot0( argb );
// Create Bitmap from ARGB pixel data.
return Bitmap.createBitmap( argb, width, height, Bitmap.Config.ARGB_8888 );
}
}
private void convertYuvToArgbRot0( final int[] outputArgb )
{
synchronized ( planeLock )
{
// Calculate the size of the frame
int size = width * height;
// Each U/V cell is overlaid on a 2x2 block of Y cells.
// Loop through the size of the U/V planes, and manage the 2x2 Y block on each iteration.
int u, v;
int y1, y2, y3, y4;
int p1, p2, p3, p4;
int rowOffset = 0; // Y and RGB array position is offset by an extra row width each iteration, since they're handled as 2x2 sections.
final int uvSize = size / 4; // U/V plane is one quarter the total size of the frame.
final int uvWidth = width / 2; // U/V plane width is half the width of the frame.
for ( int i = 0; i < uvSize; i++ )
{
// At the end of each row, increment the Y/RGB row offset by an extra frame width
if ( i != 0 && ( i % ( uvWidth ) ) == 0 )
{
rowOffset += width;
}
// Calculate the 2x2 grid indices
p1 = rowOffset + ( i * 2 );
p2 = p1 + 1;
p3 = p1 + width;
p4 = p3 + 1;
// Get the U and V values from the source.
u = uPlane[i] & 0xff;
v = vPlane[i] & 0xff;
u = u - 128;
v = v - 128;
// Get the Y values for the matching 2x2 pixel block
y1 = yPlane[p1] & 0xff;
y2 = yPlane[p2] & 0xff;
y3 = yPlane[p3] & 0xff;
y4 = yPlane[p4] & 0xff;
// Convert each YUV pixel to RGB
outputArgb[p1] = convertYuvToArgb( y1, u, v );
outputArgb[p2] = convertYuvToArgb( y2, u, v );
outputArgb[p3] = convertYuvToArgb( y3, u, v );
outputArgb[p4] = convertYuvToArgb( y4, u, v );
}
}
}
private void convertYuvToArgbRot90( final int[] outputArgb )
{
synchronized ( planeLock )
{
int u, v;
int y1, y2, y3, y4;
int p1, p2, p3, p4;
int d1, d2, d3, d4;
int uvIndex;
final int uvWidth = width / 2; // U/V plane width is half the width of the frame.
final int uvHeight = height / 2; // U/V plane height is half the height of the frame.
int rotCol;
int rotRow;
// Each U/V cell is overlaid on a 2x2 block of Y cells.
// Loop through the size of the U/V planes, and manage the 2x2 Y block on each iteration.
for ( int row = 0; row < uvHeight; row++ )
{
// Calculate the column on the rotated image from the row on the source image
rotCol = ( uvHeight - 1 ) - row;
for ( int col = 0; col < uvWidth; col++ )
{
// Calculate the row on the rotated image from the column on the source image
rotRow = col;
// Calculate the 2x2 grid indices
p1 = ( row * width * 2 ) + ( col * 2 );
p2 = p1 + 1;
p3 = p1 + width;
p4 = p3 + 1;
// Get the U and V values from the source.
uvIndex = ( row * uvWidth ) + col;
u = uPlane[uvIndex] & 0xff;
v = vPlane[uvIndex] & 0xff;
u = u - 128;
v = v - 128;
// Get the Y values for the matching 2x2 pixel block
y1 = yPlane[p1] & 0xff;
y2 = yPlane[p2] & 0xff;
y3 = yPlane[p3] & 0xff;
y4 = yPlane[p4] & 0xff;
// Calculate the destination 2x2 grid indices
d1 = ( rotRow * height * 2 ) + ( rotCol * 2 ) + 1;
d2 = d1 + height;
d3 = d1 - 1;
d4 = d3 + height;
// Convert each YUV pixel to RGB
outputArgb[d1] = convertYuvToArgb( y1, u, v );
outputArgb[d2] = convertYuvToArgb( y2, u, v );
outputArgb[d3] = convertYuvToArgb( y3, u, v );
outputArgb[d4] = convertYuvToArgb( y4, u, v );
}
}
}
}
private void convertYuvToArgbRot180( final int[] outputArgb )
{
synchronized ( planeLock )
{
// Calculate the size of the frame
int size = width * height;
// Each U/V cell is overlaid on a 2x2 block of Y cells.
// Loop through the size of the U/V planes, and manage the 2x2 Y block on each iteration.
int u, v;
int y1, y2, y3, y4;
int p1, p2, p3, p4;
int rowOffset = 0; // Y and RGB array position is offset by an extra row width each iteration, since they're handled as 2x2 sections.
final int uvSize = size / 4; // U/V plane is one quarter the total size of the frame.
final int uvWidth = width / 2; // U/V plane width is half the width of the frame.
final int invertSize = size - 1; // Store size - 1 so it doesn't have to be calculated 4x every iteration.
for ( int i = 0; i < uvSize; i++ )
{
// At the end of each row, increment the Y/RGB row offset by an extra frame width
if ( i != 0 && ( i % ( uvWidth ) ) == 0 )
{
rowOffset += width;
}
// Calculate the 2x2 grid indices
p1 = rowOffset + ( i * 2 );
p2 = p1 + 1;
p3 = p1 + width;
p4 = p3 + 1;
// Get the U and V values from the source.
u = uPlane[i] & 0xff;
v = vPlane[i] & 0xff;
u = u - 128;
v = v - 128;
// Get the Y values for the matching 2x2 pixel block
y1 = yPlane[p1] & 0xff;
y2 = yPlane[p2] & 0xff;
y3 = yPlane[p3] & 0xff;
y4 = yPlane[p4] & 0xff;
// Convert each YUV pixel to RGB
outputArgb[invertSize - p1] = convertYuvToArgb( y1, u, v );
outputArgb[invertSize - p2] = convertYuvToArgb( y2, u, v );
outputArgb[invertSize - p3] = convertYuvToArgb( y3, u, v );
outputArgb[invertSize - p4] = convertYuvToArgb( y4, u, v );
}
}
}
// TODO: This is just rot90 reversed - would probably be a little faster if it was actually rotating -90 instead. Realistically, who cares.
private void convertYuvToArgbRot270( final int[] outputArgb )
{
synchronized ( planeLock )
{
// Calculate the size of the frame
int size = width * height;
int u, v;
int y1, y2, y3, y4;
int p1, p2, p3, p4;
int d1, d2, d3, d4;
int uvIndex;
final int uvWidth = width / 2; // U/V plane width is half the width of the frame.
final int uvHeight = height / 2; // U/V plane height is half the height of the frame.
final int invertSize = size - 1; // Store size - 1 so it doesn't have to be calculated 4x every iteration.
int rotCol;
int rotRow;
// Each U/V cell is overlaid on a 2x2 block of Y cells.
// Loop through the size of the U/V planes, and manage the 2x2 Y block on each iteration.
for ( int row = 0; row < uvHeight; row++ )
{
// Calculate the column on the rotated image from the row on the source image
rotCol = ( uvHeight - 1 ) - row;
for ( int col = 0; col < uvWidth; col++ )
{
// Calculate the row on the rotated image from the column on the source image
rotRow = col;
// Calculate the 2x2 grid indices
p1 = ( row * width * 2 ) + ( col * 2 );
p2 = p1 + 1;
p3 = p1 + width;
p4 = p3 + 1;
// Get the U and V values from the source.
uvIndex = ( row * uvWidth ) + col;
u = uPlane[uvIndex] & 0xff;
v = vPlane[uvIndex] & 0xff;
u = u - 128;
v = v - 128;
// Get the Y values for the matching 2x2 pixel block
y1 = yPlane[p1] & 0xff;
y2 = yPlane[p2] & 0xff;
y3 = yPlane[p3] & 0xff;
y4 = yPlane[p4] & 0xff;
// Calculate the destination 2x2 grid indices
d1 = ( rotRow * height * 2 ) + ( rotCol * 2 ) + 1;
d2 = d1 + height;
d3 = d1 - 1;
d4 = d3 + height;
// Convert each YUV pixel to RGB
outputArgb[invertSize - d1] = convertYuvToArgb( y1, u, v );
outputArgb[invertSize - d2] = convertYuvToArgb( y2, u, v );
outputArgb[invertSize - d3] = convertYuvToArgb( y3, u, v );
outputArgb[invertSize - d4] = convertYuvToArgb( y4, u, v );
}
}
}
}
private int convertYuvToArgb( final int y, final int u, final int v )
{
int r, g, b;
// Convert YUV to RGB
r = y + (int)(1.402f*v);
g = y - (int)(0.344f*u +0.714f*v);
b = y + (int)(1.772f*u);
// Clamp RGB values to [0,255]
r = ( r > 255 ) ? 255 : ( r < 0 ) ? 0 : r;
g = ( g > 255 ) ? 255 : ( g < 0 ) ? 0 : g;
b = ( b > 255 ) ? 255 : ( b < 0 ) ? 0 : b;
// Shift the RGB values into position in the final ARGB pixel
return 0xff000000 | (b<<16) | (g<<8) | r;
}
}
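
A minimal usage sketch (not part of the gist itself): it assumes your renderer hands you a VideoRenderer.I420Frame from some render callback, and it only uses the YuvFrame API defined above. The class and method names here are illustrative.

import android.graphics.Bitmap;
import org.webrtc.VideoRenderer;

public class YuvFrameExample
{
    private volatile YuvFrame latestFrame;

    // Call this from the render callback with each incoming frame.
    // (Depending on your libjingle version you may also need to hand the frame back to the renderer when done.)
    public void onI420Frame( final VideoRenderer.I420Frame i420Frame )
    {
        if ( null == latestFrame )
        {
            latestFrame = new YuvFrame( i420Frame, YuvFrame.PROCESSING_NONE );
        }
        else
        {
            latestFrame.fromI420Frame( i420Frame, YuvFrame.PROCESSING_NONE, System.nanoTime() );
        }
    }

    // Call this from another thread to snapshot the most recent frame as a Bitmap.
    public Bitmap snapshot()
    {
        final YuvFrame frame = latestFrame;
        return ( frame != null && frame.hasData() ) ? frame.getBitmap() : null;
    }
}
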
@priteshimp

How to use this?

@marwa1994

How to use this? I try to use it in renderFrame but I always get width and height equal to 0.

@sandeep5193

@marwa1994 that's because you're looking at the remote renderer. For the local renderer it works; the remote renderer frame is not YUV.

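A small guard along the same lines (my sketch, not from the gist): the libjingle I420Frame exposes a public yuvFrame flag (the same one the TODO in the code above mentions), so texture-backed remote frames can be skipped instead of producing empty planes. The class name is illustrative.

import org.webrtc.VideoRenderer;

public final class FrameGuard
{
    // Returns a YuvFrame only when the incoming frame actually carries YUV planes.
    public static YuvFrame toYuvFrameOrNull( final VideoRenderer.I420Frame i420Frame )
    {
        if ( !i420Frame.yuvFrame )
        {
            // Texture-backed frame (e.g. a remote RENDERER_TEXTURE frame); the plane copy above can't handle it.
            return null;
        }
        return new YuvFrame( i420Frame, YuvFrame.PROCESSING_NONE );
    }
}
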
@sandeep5193

@JBurkeKF can anything be done when i420Frame.yuvFrame is false? Remote frames are of type RendererType.RENDERER_TEXTURE.

@buttiiburak

Is there any method to convert a Bitmap back to an I420Frame?


ghost commented Feb 19, 2020

@marwa1994 that's because you're looking at remote renderer. for local renderer it works. remote renderer frame is not YUV.

I'm not a pro at all this, but I am able to get a bitmap from the remote using this.
PS: Thank you very much JBurkeKF

@JBurkeKF
Author

We upgraded our WebRTC project away from libjingle to use Google's official WebRTC library. Here's the updated YuvFrame.java that works with all the library changes.

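For anyone migrating the same way, a rough sketch (mine, not JBurkeKF's updated file) of how the plane data is reached in Google's org.webrtc library: the VideoFrame's buffer is converted with toI420(), which returns a retained I420Buffer that the caller must release.

import java.nio.ByteBuffer;
import org.webrtc.VideoFrame;

public final class VideoFramePlanes
{
    public static void readPlanes( final VideoFrame videoFrame )
    {
        // toI420() returns a retained, memory-backed copy; release it when finished.
        final VideoFrame.I420Buffer i420 = videoFrame.getBuffer().toI420();
        try
        {
            final int width = i420.getWidth();
            final int height = i420.getHeight();
            final int rotation = videoFrame.getRotation();
            final ByteBuffer y = i420.getDataY(); // row stride: i420.getStrideY()
            final ByteBuffer u = i420.getDataU(); // row stride: i420.getStrideU()
            final ByteBuffer v = i420.getDataV(); // row stride: i420.getStrideV()
            // ... copy the planes out here, honoring the strides ...
        }
        finally
        {
            i420.release();
        }
    }
}
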
@JBurkeKF
Author

I've posted the iOS version of this, I420Frame, done in Objective-C.

@cyrus88

cyrus88 commented Oct 28, 2020

It produces a black and white bitmap. Please suggest a way to correct it.

@sushantNoida

@Override
public void onFrame(VideoFrame videoFrame) {
    if (yuvFrame == null) {
        yuvFrame = new YuvFrame(videoFrame, PROCESSING_NONE, appContext);
    }
}

Then on another thread, which runs every 100 ms, I am getting a bitmap from it.

if (yuvFrame != null) {
    cameraBitmap = yuvFrame.getBitmap();
    yuvFrame.dispose();
    yuvFrame = null;
}

The problem I am facing is a memory crash after 6 minutes. Do I need to release something in yuvFrame?

@A-KVerma

@sushantNoida I am also trying to convert a WebRTC videoFrame into a bitmap and facing the same issue. Were you able to figure it out?

@cyrus88

cyrus88 commented Oct 29, 2021 via email

@A-KVerma

A-KVerma commented Oct 30, 2021

This is how I am using it ->
fun captureBitmapFromYuvFrame(frame: VideoFrame): Bitmap? {

    val yuvPlanes = ArrayList<ByteBuffer>()
    yuvPlanes.add(frame.buffer.toI420().dataY) // if I comment out this line there is no memory increase, but if it is uncommented memory shoots up
    yuvPlanes.clear()

    return null // bitmap creation logic removed while debugging
}

I have commented out all the logic of creating YuvImage and creating bitmap with it.

Any idea how I can clear it? It looks like .clear() is not helping here.

If I try to release the frame using frame.release(), I get this error:

release is being called of an object with refcount < 1

It seems like the videoFrame is already being released internally by WebRTC.

@cyrus88

cyrus88 commented Oct 30, 2021 via email

@A-KVerma

Thanks for your quick response

I am not using the copyPlanes method here; I am converting a VideoFrame to a bitmap like this:
fun captureBitmapFromYuvFrame(i420Frame: VideoFrame): Bitmap? {

    val yuvPlanes = ArrayList<ByteBuffer>()
    yuvPlanes.add(i420Frame.buffer.toI420().dataY)
    yuvPlanes.add(i420Frame.buffer.toI420().dataU)
    yuvPlanes.add(i420Frame.buffer.toI420().dataV)

    val yuvStrides = ArrayList<Int>()
    yuvStrides.add(i420Frame.buffer.toI420().strideY)
    yuvStrides.add(i420Frame.buffer.toI420().strideU)
    yuvStrides.add(i420Frame.buffer.toI420().strideV)

    val yuvImage: YuvImage = i420ToYuvImage(
        yuvPlanes,
        yuvStrides,
        i420Frame.buffer.toI420().width,
        i420Frame.buffer.toI420().height
    )
    val stream = ByteArrayOutputStream()
    val rect = Rect(0, 0, yuvImage.width, yuvImage.height)

    // Compress YuvImage to jpeg
    yuvImage.compressToJpeg(rect, 100, stream)
    // Convert jpeg to Bitmap
    val imageBytes: ByteArray = stream.toByteArray()
    var bitmap = BitmapFactory.decodeByteArray(imageBytes, 0, imageBytes.size)
    val matrix = Matrix()

    // Apply any needed rotation
    matrix.postRotate(i420Frame.rotation.toFloat())
 
    bitmap = Bitmap.createBitmap(
        bitmap, 0, 0, bitmap.width, bitmap.height, matrix,
        true
    )
    for (item in yuvPlanes){
        item.clear()
    }
    stream.close()

    return bitmap
}

After this I am doing some processing on the returned bitmap and creating a new VideoFrame out of it.
I have commented out all the code in this captureBitmapFromYuvFrame function and am just debugging with the first two lines. Even though I call clear() after that, I still see a sharp increase in memory and a crash after some time.
If I do videoFrame.release(), I get this error:
**release is being called of an object with refcount < 1**

Am I converting the frame to a bitmap in an incorrect way, do I have to clear something, or is there a more optimized way I should follow?

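A possible explanation and fix for the growth described above (a sketch under the assumption that the leak comes from the unreleased buffers): every buffer.toI420() call returns a new retained I420Buffer, and the snippet above calls it once per plane, stride and dimension without ever releasing any of them. Converting once, reusing the result, and releasing it in a finally block keeps the refcount balanced; there is then no need to call release() on the VideoFrame itself. The class and method names below are illustrative.

import org.webrtc.VideoFrame;

public final class FrameSnapshotHelper
{
    public static void withI420( final VideoFrame frame )
    {
        final VideoFrame.I420Buffer i420 = frame.getBuffer().toI420(); // one retained copy
        try
        {
            // Read i420.getDataY()/getDataU()/getDataV(), the strides, width and height here,
            // and build the YuvImage / Bitmap from those values.
        }
        finally
        {
            i420.release(); // balances the retain from toI420(); don't release the VideoFrame here
        }
    }
}
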
@sidal

sidal commented Oct 6, 2022

It produces black and white bitmap. Please suggest a way to correct it.

@cyrus88 I am also getting a black and white bitmap. Were you able to solve this?
