/**
 * Draws an external (SurfaceTexture / camera) texture onto the current EGL
 * surface as a full-screen quad using GLES 2.0.
 * <p>
 * All methods — including the constructor — must be called on a thread that
 * has a current GL context. This class is not thread-safe.
 */
public class GLDrawer2D {
    private static final boolean DEBUG = true; // TODO set false on release
    private static final String TAG = "GLDrawer2D";

    // Vertex shader: positions the quad via uMVPMatrix and maps texture
    // coordinates via uTexMatrix (e.g. SurfaceTexture#getTransformMatrix).
    private static final String vss
        = "uniform mat4 uMVPMatrix;\n"
        + "uniform mat4 uTexMatrix;\n"
        + "attribute highp vec4 aPosition;\n"
        + "attribute highp vec4 aTextureCoord;\n"
        + "varying highp vec2 vTextureCoord;\n"
        + "\n"
        + "void main() {\n"
        + " gl_Position = uMVPMatrix * aPosition;\n"
        + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
        + "}\n";
    // Fragment shader: samples a GL_TEXTURE_EXTERNAL_OES texture.
    private static final String fss
        = "#extension GL_OES_EGL_image_external : require\n"
        + "precision mediump float;\n"
        + "uniform samplerExternalOES sTexture;\n"
        + "varying highp vec2 vTextureCoord;\n"
        + "void main() {\n"
        + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
        + "}";

    // Full-screen quad in NDC, laid out for GL_TRIANGLE_STRIP.
    private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
    private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };

    private final FloatBuffer pVertex;
    private final FloatBuffer pTexCoord;
    private int hProgram;
    int maPositionLoc;
    int maTextureCoordLoc;
    int muMVPMatrixLoc;
    int muTexMatrixLoc;
    private final float[] mMvpMatrix = new float[16];

    private static final int FLOAT_SZ = Float.SIZE / 8;   // bytes per float
    private static final int VERTEX_NUM = 4;              // quad = 4 vertices
    private static final int VERTEX_SZ = VERTEX_NUM * 2;  // 2 floats per vertex

    /**
     * Constructor. Compiles/links the shader program, uploads the quad
     * geometry and initializes both matrices to identity.
     * Must be called in a GL context.
     */
    public GLDrawer2D() {
        pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pVertex.put(VERTICES);
        pVertex.flip();
        pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
            .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pTexCoord.put(TEXCOORD);
        pTexCoord.flip();

        hProgram = loadShader(vss, fss);
        GLES20.glUseProgram(hProgram);
        maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
        maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
        muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
        muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");

        Matrix.setIdentityM(mMvpMatrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
        // Stride is in BYTES: 2 floats * 4 bytes = 8, which happens to equal
        // VERTEX_SZ. Spelled as 2 * FLOAT_SZ to make the byte unit explicit.
        GLES20.glVertexAttribPointer(maPositionLoc, 2, GLES20.GL_FLOAT, false, 2 * FLOAT_SZ, pVertex);
        GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, 2 * FLOAT_SZ, pTexCoord);
        GLES20.glEnableVertexAttribArray(maPositionLoc);
        GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
    }

    /**
     * Releases the GL program. Must be called in a GL context.
     * Safe to call more than once.
     */
    public void release() {
        // glCreateProgram returns 0 on failure, so only positive handles are
        // real programs worth deleting.
        if (hProgram > 0)
            GLES20.glDeleteProgram(hProgram);
        hProgram = -1;
    }

    /**
     * Draws the given external texture with the given texture matrix.
     *
     * @param tex_id GL_TEXTURE_EXTERNAL_OES texture name
     * @param tex_matrix 4x4 texture transform (at least 16 floats, not length
     *        checked); if null, the previously set matrix is reused
     */
    public void draw(final int tex_id, final float[] tex_matrix) {
        GLES20.glUseProgram(hProgram);
        if (tex_matrix != null)
            GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex_id);
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glUseProgram(0);
    }

    /**
     * Creates an external (GL_TEXTURE_EXTERNAL_OES) texture with clamped
     * wrapping and nearest filtering. Must be called in a GL context.
     *
     * @return the new texture name (left bound on texture unit 0)
     */
    public static int initTex() {
        if (DEBUG) Log.v(TAG, "initTex:");
        final int[] tex = new int[1];
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glGenTextures(1, tex, 0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
            GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        return tex[0];
    }

    /**
     * Deletes the given texture. Must be called in a GL context.
     */
    public static void deleteTex(final int hTex) {
        if (DEBUG) Log.v(TAG, "deleteTex:");
        final int[] tex = new int[] {hTex};
        GLES20.glDeleteTextures(1, tex, 0);
    }

    /**
     * Compiles both shaders and links them into a program.
     *
     * @param vss source of vertex shader
     * @param fss source of fragment shader
     * @return the program name (link failures are logged; callers get the
     *         handle either way, matching the original behavior)
     */
    public static int loadShader(final String vss, final String fss) {
        if (DEBUG) Log.v(TAG, "loadShader:");
        final int vs = compileShader(GLES20.GL_VERTEX_SHADER, vss, "vertex");
        final int fs = compileShader(GLES20.GL_FRAGMENT_SHADER, fss, "fragment");
        final int program = GLES20.glCreateProgram();
        // Only attach real shader objects; attaching 0 raises GL_INVALID_VALUE.
        if (vs != 0) GLES20.glAttachShader(program, vs);
        if (fs != 0) GLES20.glAttachShader(program, fs);
        GLES20.glLinkProgram(program);
        // Shader objects are no longer needed once linked; detach and delete
        // them so the driver can reclaim them (the original leaked them).
        if (vs != 0) {
            GLES20.glDetachShader(program, vs);
            GLES20.glDeleteShader(vs);
        }
        if (fs != 0) {
            GLES20.glDetachShader(program, fs);
            GLES20.glDeleteShader(fs);
        }
        final int[] linked = new int[1];
        GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linked, 0);
        if (linked[0] == 0 && DEBUG) {
            Log.e(TAG, "Failed to link program:" + GLES20.glGetProgramInfoLog(program));
        }
        return program;
    }

    /** Compiles a single shader; returns 0 (and logs) on compile failure. */
    private static int compileShader(final int type, final String source, final String label) {
        final int shader = GLES20.glCreateShader(type);
        GLES20.glShaderSource(shader, source);
        GLES20.glCompileShader(shader);
        final int[] compiled = new int[1];
        GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
            if (DEBUG) Log.e(TAG, "Failed to compile " + label + " shader:"
                + GLES20.glGetShaderInfoLog(shader));
            GLES20.glDeleteShader(shader);
            return 0;
        }
        return shader;
    }

    /**
     * Returns the live (not copied) model-view-projection matrix; mutations
     * take effect on the next {@link #draw}.
     */
    public float[] getMvpMatrix() {
        return mMvpMatrix;
    }

    /** @deprecated misspelled; use {@link #getMvpMatrix()}. Kept for callers. */
    @Deprecated
    public float[] getMvpMatrxi() {
        return mMvpMatrix;
    }
}
// ---------------------------------------------------------------------------
// Source: GitHub gist GerrieWell/d55430e6e06719023435c3e5ec815588
// (last active August 24, 2022) — companion file "RecordSurfaceRenderHandler.md"
// ---------------------------------------------------------------------------
/**
 * Handler bound to a dedicated looper thread ("RenderSurfaceThread") that owns
 * an EGL context and renders camera frames into a recording Surface via
 * {@link GLDrawer2D}. Obtain instances with {@link #createHandler()}.
 */
public final class RecordSurfaceRenderHandler extends Handler {
    private static final boolean DEBUG = false; // TODO set false on release
    private static final String TAG = "RecordSurfaceRenderH";

    private static final int MSG_RENDER_SET_GLCONTEXT = 1;
    // NOTE(review): the ids below were used but never declared in the original
    // snippet (it would not compile). Values are assumed — TODO confirm against
    // whatever code sends MSG_RENDER_DRAW / MSG_RENDER_DRAW2 / MSG_CHECK_VALID.
    private static final int MSG_RENDER_DRAW = 2;
    private static final int MSG_RENDER_DRAW2 = 3;
    private static final int MSG_CHECK_VALID = 4;
    private static final int MSG_RENDER_QUIT = 9;

    private int mTexId = -1;           // texture drawn on MSG_RENDER_DRAW/DRAW2
    private final RenderThread mThread;

    /** Safely creates a RenderHandler on its own started render thread. */
    public static RecordSurfaceRenderHandler createHandler() {
        if (DEBUG) Log.v(TAG, "createHandler:");
        return createHandler("RenderSurfaceThread");
    }

    private static final RecordSurfaceRenderHandler createHandler(final String name) {
        if (DEBUG) Log.v(TAG, "createHandler:name=" + name);
        final RenderThread thread = new RenderThread(name);
        thread.start();
        // Blocks until the render thread's looper is up and its handler exists.
        return thread.getHandler();
    }

    /**
     * Posts a request to (re)create the EGL context/surface on the render thread.
     *
     * @param shared_context EGL context to share textures with
     * @param tex_id texture drawn by subsequent draw messages
     * @param surface Surface, SurfaceTexture or SurfaceHolder to render into
     * @param isRecordable whether to request an EGL_RECORDABLE_ANDROID config
     * @throws RuntimeException if {@code surface} is not a supported window type
     */
    public final void setEglContext(final EGLContext shared_context, final int tex_id, final Object surface, final boolean isRecordable) {
        if (DEBUG) Log.i(TAG, "RenderHandler:setEglContext:");
        if (!(surface instanceof Surface) && !(surface instanceof SurfaceTexture) && !(surface instanceof SurfaceHolder))
            throw new RuntimeException("unsupported window type:" + surface);
        mTexId = tex_id;
        sendMessage(obtainMessage(MSG_RENDER_SET_GLCONTEXT, isRecordable ? 1 : 0, 0, new ContextParams(shared_context, surface)));
    }

    /** Discards pending work and asks the render thread's looper to quit. */
    public final void release() {
        if (DEBUG) Log.i(TAG, "release:");
        removeMessages(MSG_RENDER_SET_GLCONTEXT);
        removeMessages(MSG_RENDER_DRAW);
        sendEmptyMessage(MSG_RENDER_QUIT);
    }

    @Override
    public final void handleMessage(final Message msg) {
        switch (msg.what) {
        case MSG_RENDER_SET_GLCONTEXT:
            final ContextParams params = (ContextParams)msg.obj;
            mThread.handleSetEglContext(params.shared_context, params.surface, msg.arg1 != 0);
            break;
        case MSG_RENDER_DRAW:
            //mThread.handleDraw(msg.arg1, (float[])msg.obj); //@wei 0604
            mThread.handleDraw(mTexId, (float[])msg.obj);
            break;
        case MSG_RENDER_DRAW2:
            // Timestamp was split across arg1/arg2 (high/low 32 bits); rebuild it.
            long timestamp = (((long) msg.arg1) << 32) |
                (((long) msg.arg2) & 0xffffffffL);
            mThread.handleFrameAvailable(mTexId, (float[]) msg.obj, timestamp);
            //mThread.handleDrain();
            break;
        case MSG_CHECK_VALID:
            // Liveness ping: wake anyone waiting on the render thread's monitor.
            synchronized (mThread.mSync) {
                mThread.mSync.notify();
            }
            break;
        case MSG_RENDER_QUIT:
            Looper.myLooper().quit();
            break;
        default:
            super.handleMessage(msg);
        }
    }

    //********************************************************************************
    private RecordSurfaceRenderHandler(final RenderThread thread) {
        if (DEBUG) Log.i(TAG, "RenderHandler:");
        mThread = thread;
    }

    /** Immutable bundle for MSG_RENDER_SET_GLCONTEXT payloads. */
    private static final class ContextParams {
        final EGLContext shared_context;
        final Object surface;
        public ContextParams(final EGLContext shared_context, final Object surface) {
            this.shared_context = shared_context;
            this.surface = surface;
        }
    }

    /**
     * Thread that executes render methods.
     * You could also use a HandlerThread instead of this and create the Handler
     * from its Looper.
     */
    private static final class RenderThread extends Thread {
        private static final String TAG_THREAD = "RenderThread";
        private final Object mSync = new Object();
        private RecordSurfaceRenderHandler mHandler;
        private EGLBase mEgl;
        private EGLBase.EglSurface mTargetSurface;
        private Surface mSurface;
        private GLDrawer2D mDrawer;

        public RenderThread(final String name) {
            super(name);
        }

        /**
         * Blocks until {@link #run()} has created the handler, then returns it.
         * Loops on the condition (mHandler != null) so that a notify that fires
         * before we wait — or a spurious wakeup — cannot hang/return null
         * (the original did a single bare wait()).
         */
        public final RecordSurfaceRenderHandler getHandler() {
            synchronized (mSync) {
                while (mHandler == null) {
                    try {
                        mSync.wait();
                    } catch (final InterruptedException e) {
                        // Preserve the interrupt for callers and stop waiting.
                        Thread.currentThread().interrupt();
                        break;
                    }
                }
            }
            return mHandler;
        }

        /**
         * Creates the EGL context, target surface and drawer, releasing any
         * previous ones first.
         *
         * @param shard_context EGL context to share with
         * @param surface Surface/SurfaceTexture/SurfaceHolder render target
         * @param isRecordable request an EGL_RECORDABLE_ANDROID config
         */
        public final void handleSetEglContext(final EGLContext shard_context, final Object surface, final boolean isRecordable) {
            if (DEBUG) Log.i(TAG_THREAD, "setEglContext:");
            release();
            synchronized (mSync) {
                mSurface = surface instanceof Surface ? (Surface)surface
                    : (surface instanceof SurfaceTexture ? new Surface((SurfaceTexture)surface) : null);
            }
            mEgl = new EGLBase(shard_context, false, isRecordable);
            mTargetSurface = mEgl.createFromSurface(surface);
            mDrawer = new GLDrawer2D();
        }

        /**
         * Draws the texture to the target surface and swaps buffers.
         * Note (@wei): textures live in GPU memory, so different shader
         * programs can share the same texture.
         *
         * @param tex_id external texture name (negative ids are skipped)
         * @param tex_matrix texture transform, may be null
         */
        public void handleDraw(final int tex_id, final float[] tex_matrix) {
            if (DEBUG) Log.i(TAG_THREAD, "draw");
            if (tex_id >= 0) {
                mTargetSurface.makeCurrent();
                mDrawer.draw(tex_id, tex_matrix);
                mTargetSurface.swap();
            }
        }

        /**
         * Draws a frame for recording with an explicit presentation timestamp.
         * (@wei) Before recording starts the thread used to run a few frames
         * and then block in swap() — presumably because the input surface was
         * invalid — so v2 only draws while actually recording.
         * NOTE(review): unlike handleDraw, this does not call makeCurrent()
         * before drawing; it appears to rely on the context already being
         * current — confirm.
         *
         * @param tex_id external texture name
         * @param transform texture transform matrix
         * @param timestampNanos presentation time in nanoseconds
         */
        private void handleFrameAvailable(int tex_id, float[] transform, long timestampNanos) {
            Log.v(TAG, "handleFrameAvailable #0");
            SurfaceEncoder mVideoEncoder = SurfaceEncoder.getInstance();
            if (mVideoEncoder == null || !mVideoEncoder.isRecording())
                return;
            Log.d(TAG, "handleDrain: #3");
            mVideoEncoder.drainAllEncoderMuxer(false);
            mDrawer.draw(tex_id, transform);
            mTargetSurface.setPresentationTime(timestampNanos);
            mTargetSurface.swap();
            Log.v(TAG, "handleFrameAvailable #1");
        }

        @Override
        public final void run() {
            if (DEBUG) Log.v(TAG_THREAD, "started");
            Looper.prepare();
            synchronized (mSync) {
                mHandler = new RecordSurfaceRenderHandler(this);
                mSync.notify();
            }
            Looper.loop();        // blocks until MSG_RENDER_QUIT quits the looper
            if (DEBUG) Log.v(TAG_THREAD, "finishing");
            release();
            synchronized (mSync) {
                mHandler = null;
            }
            if (DEBUG) Log.v(TAG_THREAD, "finished");
        }

        /** Releases drawer, surfaces and EGL state; safe to call repeatedly. */
        private final void release() {
            if (DEBUG) Log.v(TAG_THREAD, "release:");
            if (mDrawer != null) {
                mDrawer.release();
                mDrawer = null;
            }
            synchronized (mSync) {
                mSurface = null;
            }
            if (mTargetSurface != null) {
                clear();    // paint it black before tearing it down
                mTargetSurface.release();
                mTargetSurface = null;
            }
            if (mEgl != null) {
                mEgl.release();
                mEgl = null;
            }
        }

        /**
         * Fills the target surface with black.
         */
        private final void clear() {
            if (DEBUG) Log.v(TAG_THREAD, "clear:");
            mTargetSurface.makeCurrent();
            GLES20.glClearColor(0, 0, 0, 1);
            GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
            mTargetSurface.swap();
        }
    }
}
/**
 * Drains all pending output buffers from the video encoder, writing them to
 * the MediaMuxer (once started) and/or a raw H.264 dump file, and also drives
 * the audio path via recordAndMuxingAudio().
 *
 * @param endOfStream when true, signals EOS to the encoder first and then
 *        spins until the encoder emits BUFFER_FLAG_END_OF_STREAM; when false,
 *        returns as soon as no output is immediately available.
 */
public void drainAllEncoderMuxer(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;    // dequeue timeout, microseconds
    if (VERBOSE) Log.d(TAG, "drainAllEncoderMuxer(" + endOfStream + ")");
    if (endOfStream) {
        if (VERBOSE) Log.d(TAG, "sending EOS to mEncoder");
        mEncoder.signalEndOfInputStream();
    }
    recordAndMuxingAudio();
    try {
        // Lazily open the raw .h264 side-dump on external storage.
        // NOTE(review): this stream is never closed in this method —
        // presumably closed elsewhere when recording stops; confirm.
        if (mOuputFile == null)
            mOuputFile = new FileOutputStream(Environment.getExternalStorageDirectory() + "/test.h264");
        ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
        while (true) {
            int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
            Log.v(TAG, "bufferInfo f:" + mBufferInfo.flags + "\tpts:" + mBufferInfo.presentationTimeUs);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.v(TAG, "INFO_TRY_AGAIN_LATER");
                // no output available yet
                if (!endOfStream) {
                    break; // out of while — non-EOS drains are best-effort
                } else {
                    // At EOS we must keep polling until the EOS flag arrives.
                    if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS");
                }
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an mEncoder, but refresh the cached array
                encoderOutputBuffers = mEncoder.getOutputBuffers();
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                Log.i(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
                // should happen before receiving buffers, and should only happen once
                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = mEncoder.getOutputFormat();
                Log.d(TAG, "mEncoder output format changed: " + newFormat);
                if (mMuxer != null) {
                    // Register the video track; muxer actually starts once all
                    // tracks are registered (see tryStartMuxer).
                    mVideoTrackIndex = mMuxer.addTrack(newFormat);
                    tryStartMuxer();
                    //mMuxer.start();
                }
                if (mOuputFile != null) {
                    // Dump SPS (csd-0) and PPS (csd-1) so the raw .h264 file
                    // is decodable on its own.
                    ByteBuffer sps = newFormat.getByteBuffer("csd-0");
                    ByteBuffer pps = newFormat.getByteBuffer("csd-1");
                    // NOTE(review): ByteBuffer.array() returns the whole
                    // backing array and ignores position/limit, so this may
                    // write extra bytes beyond the csd payload — verify.
                    mOuputFile.write(sps.array());
                    mOuputFile.write(pps.array());
                }
            } else if (encoderStatus < 0) {
                // Unknown negative status: log and keep draining.
                Log.w(TAG, "unexpected result from mEncoder.dequeueOutputBuffer: " +
                    encoderStatus);
            } else {
                // encoderStatus is a valid output buffer index.
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
                        " was null");
                }
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // @wei The codec config data was pulled out and fed to the muxer when we got
                    // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                    if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                    mBufferInfo.size = 0; //or continue
                }
                if (mBufferInfo.size != 0 /*&& mBufferInfo.presentationTimeUs!=0*/) { //pts is for test todo the first frame
                    // adjust the ByteBuffer values to match BufferInfo (not needed?)
                    encodedData.position(mBufferInfo.offset);
                    encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                    // PTS is rewritten from wall-clock time relative to the
                    // first frame, overriding the encoder-provided timestamp.
                    if (mVideoStartStampUs == 0) {
                        mBufferInfo.presentationTimeUs = 0;
                        mVideoStartStampUs = System.currentTimeMillis() * 1000;
                    } else
                        mBufferInfo.presentationTimeUs = System.currentTimeMillis() * 1000 - mVideoStartStampUs;
                    //mBufferInfo.presentationTimeUs - mVideoStartStampUs;
                    if (mMuxer != null && mMuxerStarted)
                        mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo);
                    if (mOuputFile != null) {
                        byte[] outData = new byte[mBufferInfo.size]; //copy protected buffer.
                        encodedData.get(outData);
                        mOuputFile.write(outData);
                    }
                    if (VERBOSE)
                        Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" + mBufferInfo.presentationTimeUs);
                }
                mEncoder.releaseOutputBuffer(encoderStatus, false);
                if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (!endOfStream) {
                        Log.w(TAG, "reached end of stream unexpectedly");
                    } else {
                        if (VERBOSE) Log.d(TAG, "end of stream reached");
                    }
                    break; // out of while
                }
            }//END OF WHILE true
        }
    } catch (IOException e) {
        e.printStackTrace();
    } // no separate thread for the audio path.
    if (mMuxerStarted) {
        // Wake anyone blocked on the muxer-start barrier.
        synchronized (sMuxSync) {
            sMuxSync.notifyAll();
        }
    }
    /* if(stopCounterTest++ >=400){
    stopCounterTest = 0;
    mMuxer.stop();
    mMuxer.release();
    }*/
}
// End of gist content.