@n1xx1
Last active May 7, 2021 09:27
package it.inrebus.skyte.rtcclient;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import org.webrtc.JavaI420Buffer;
import org.webrtc.SurfaceTextureHelper;
import org.webrtc.VideoCapturer;
import org.webrtc.VideoFrame;
import java.nio.ByteBuffer;
public class CustomCapturer implements VideoCapturer {
    private SurfaceTextureHelper surTexture;
    private Context appContext;
    private org.webrtc.CapturerObserver capturerObs;
    private Thread captureThread;

    @Override
    public void initialize(SurfaceTextureHelper surfaceTextureHelper, Context applicationContext, org.webrtc.CapturerObserver capturerObserver) {
        surTexture = surfaceTextureHelper;
        appContext = applicationContext;
        capturerObs = capturerObserver;
    }
    @Override
    public void startCapture(int width, int height, int fps) {
        captureThread = new Thread(() -> {
            try {
                long start = System.nanoTime();
                capturerObs.onCapturerStarted(true);
                while (true) {
                    // Render a white frame with a red square, then convert it to I420.
                    JavaI420Buffer buffer = JavaI420Buffer.allocate(width, height);
                    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
                    Canvas canvas = new Canvas(bitmap);
                    canvas.drawRGB(255, 255, 255);
                    Paint paint = new Paint();
                    paint.setColor(Color.RED);
                    paint.setAlpha(0xff);
                    canvas.drawRect(100, 100, 200, 200, paint);
                    bitmapToI420(bitmap, buffer);

                    // Timestamp in nanoseconds since capture started.
                    long frameTime = System.nanoTime() - start;
                    VideoFrame videoFrame = new VideoFrame(buffer, 0, frameTime);
                    capturerObs.onFrameCaptured(videoFrame);
                    // Drop our reference to the frame; the observer retains it if needed.
                    videoFrame.release();

                    // Pace the loop to roughly the requested frame rate.
                    Thread.sleep(1000 / fps);
                }
            } catch (InterruptedException ex) {
                // Interrupted by stopCapture(); leave the capture loop.
                ex.printStackTrace();
            }
        });
        captureThread.start();
    }
    @Override
    public void stopCapture() {
        // Interrupting the thread ends the capture loop via InterruptedException.
        captureThread.interrupt();
    }

    @Override
    public void changeCaptureFormat(int width, int height, int fps) {
        // Not supported by this synthetic capturer.
    }

    @Override
    public void dispose() {
        // Nothing to clean up; frame buffers are released per frame.
    }

    @Override
    public boolean isScreencast() {
        return false;
    }
    // CPU-side ARGB -> I420 conversion (BT.601 coefficients). Assumes width and
    // height are even, as required by the 2x2 chroma subsampling below.
    private static void bitmapToI420(Bitmap src, JavaI420Buffer dest) {
        int width = src.getWidth();
        int height = src.getHeight();
        if (width != dest.getWidth() || height != dest.getHeight())
            return;

        int strideY = dest.getStrideY();
        int strideU = dest.getStrideU();
        int strideV = dest.getStrideV();
        ByteBuffer dataY = dest.getDataY();
        ByteBuffer dataU = dest.getDataU();
        ByteBuffer dataV = dest.getDataV();

        for (int line = 0; line < height; line++) {
            if (line % 2 == 0) {
                // Even rows: two Y samples per iteration plus one U and one V
                // sample for each 2x2 block of pixels.
                for (int x = 0; x < width; x += 2) {
                    int px = src.getPixel(x, line);
                    // Keep the channels as ints: a cast to byte would make values
                    // above 127 negative and corrupt the conversion.
                    int r = (px >> 16) & 0xff;
                    int g = (px >> 8) & 0xff;
                    int b = px & 0xff;
                    dataY.put(line * strideY + x, (byte) (((66 * r + 129 * g + 25 * b) >> 8) + 16));
                    dataU.put(line / 2 * strideU + x / 2, (byte) (((-38 * r - 74 * g + 112 * b) >> 8) + 128));
                    dataV.put(line / 2 * strideV + x / 2, (byte) (((112 * r - 94 * g - 18 * b) >> 8) + 128));

                    // The second pixel of the pair contributes only a Y sample, at x + 1.
                    px = src.getPixel(x + 1, line);
                    r = (px >> 16) & 0xff;
                    g = (px >> 8) & 0xff;
                    b = px & 0xff;
                    dataY.put(line * strideY + x + 1, (byte) (((66 * r + 129 * g + 25 * b) >> 8) + 16));
                }
            } else {
                // Odd rows: Y samples only.
                for (int x = 0; x < width; x++) {
                    int px = src.getPixel(x, line);
                    int r = (px >> 16) & 0xff;
                    int g = (px >> 8) & 0xff;
                    int b = px & 0xff;
                    dataY.put(line * strideY + x, (byte) (((66 * r + 129 * g + 25 * b) >> 8) + 16));
                }
            }
        }
    }
}
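
For reference, a minimal sketch of how a capturer like this is typically attached to a local video track. This is not part of the gist: the method name is a placeholder, and the factory, eglBase, and appContext parameters stand in for objects the application creates elsewhere.

// Hedged usage sketch; assumed to live in the app's own WebRTC setup class.
private VideoTrack createCustomVideoTrack(PeerConnectionFactory factory, EglBase eglBase, Context appContext) {
    CustomCapturer capturer = new CustomCapturer();
    SurfaceTextureHelper helper =
            SurfaceTextureHelper.create("CustomCaptureThread", eglBase.getEglBaseContext());
    VideoSource videoSource = factory.createVideoSource(capturer.isScreencast());
    capturer.initialize(helper, appContext, videoSource.getCapturerObserver());
    capturer.startCapture(640, 480, 30);
    return factory.createVideoTrack("custom_video", videoSource);
}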
@francescogatto

Is this working? Thanks!

@n1xx1 (Author) commented Mar 17, 2021

Is this working? Thanks!

In this one I had some issues with the I420 conversion; in another test I used the WebRTC bundled converter, which uses the GPU. You can check it out here: https://gist.github.com/n1xx1/2cd38043838e259969bce983ce21ffaa
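
One way to avoid the CPU-side conversion is to draw onto the SurfaceTextureHelper's Surface and forward the texture frames it produces, letting WebRTC convert to YUV on the GPU. A rough sketch of that idea, using the fields of the class above plus android.view.Surface (hedged: this may differ from what the linked gist actually does):

// Hedged sketch of the texture path inside startCapture(); frames arrive via
// the SurfaceTextureHelper listener, so no bitmapToI420() call is needed.
surTexture.setTextureSize(width, height);
surTexture.startListening(frame -> capturerObs.onFrameCaptured(frame));
Surface surface = new Surface(surTexture.getSurfaceTexture());

// Per frame: draw with an ordinary Canvas, then post the buffer.
Canvas canvas = surface.lockCanvas(null);
canvas.drawRGB(255, 255, 255);
// ... draw the rest of the frame here ...
surface.unlockCanvasAndPost(canvas);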
