Skip to content

Instantly share code, notes, and snippets.

@VirtuosoChris
Created March 2, 2019 01:55
Show Gist options
  • Save VirtuosoChris/3a57b96411addfe4bbc3c7ae74a0b403 to your computer and use it in GitHub Desktop.
Save VirtuosoChris/3a57b96411addfe4bbc3c7ae74a0b403 to your computer and use it in GitHub Desktop.
Android video player code: a Java SurfaceTexturePlayer (MediaPlayer decoding into a SurfaceTexture) plus a native C++ VideoInput that converts the external OES frames into a standard GL_TEXTURE_2D.
/**
* Created by chrispugh on 8/13/16.
*/
package com.virtuosoengine;
import android.app.Activity;
import android.content.res.AssetFileDescriptor;
import android.graphics.SurfaceTexture;
import android.media.AudioManager;
import android.media.MediaPlayer;
import android.util.Log;
import android.view.Surface;
import android.media.MediaMetadataRetriever;
import java.io.IOException;
import java.util.HashMap;
public class SurfaceTexturePlayer implements android.graphics.SurfaceTexture.OnFrameAvailableListener,
        MediaPlayer.OnVideoSizeChangedListener,
        MediaPlayer.OnCompletionListener,
        MediaPlayer.OnErrorListener
{
    SurfaceTexture surfaceTex = null;   // receives decoded frames into an external GL texture
    Surface videoSurface = null;        // Surface handed to MediaPlayer, backed by surfaceTex
    MediaPlayer mediaPlayer = null;
    Activity activity = null;

    // onFrameAvailable()/onCompletion() are delivered on a looper thread while
    // update() runs on the GL thread -- mark producer-side state volatile so
    // the consumer thread observes the writes.
    volatile boolean videoFinished = false;  // read from native code via getField<jboolean>("videoFinished")
    int frameProcessedCounter = 0;           // frames consumed with updateTexImage(); GL thread only
    volatile int frameAvailableCounter = 0;  // frames produced by the decoder callback

    int width = 0;   // video dimensions; read from native code via getField<jint>("width"/"height")
    int height = 0;
    float volume = .333f;

    public static final String logTag = "FireworksShowVR";

    /**
     * Reads the video's pixel dimensions into {@link #width}/{@link #height}
     * using MediaMetadataRetriever, then logs them.
     *
     * @param videoPath path or URL of the video to inspect
     */
    public void populateDimensions(String videoPath)
    {
        MediaMetadataRetriever retriever = new MediaMetadataRetriever();
        try
        {
            retriever.setDataSource(videoPath, new HashMap<String, String>());
            String w = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH);
            String h = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT);
            // extractMetadata() may return null when the key is absent; the
            // original code would have thrown an NPE here.
            if (w != null && h != null)
            {
                width = Integer.parseInt(w);
                height = Integer.parseInt(h);
            }
            else
            {
                Log.e(logTag, "populateDimensions: missing video size metadata for " + videoPath);
            }
        }
        finally
        {
            // Always release the retriever, even if setDataSource/parse throws.
            retriever.release();
        }
        Log.d(logTag, "STREAMING VIDEO DIMENSIONS ARE : " + width + " " + height);
    }

    /**
     * Latches all pending decoder frames into the external texture. Must be
     * called on the thread that owns the GL context of the texture.
     *
     * @return true if at least one new frame was latched. The native caller
     *         invokes this as call&lt;jboolean&gt;("update") and re-runs the
     *         OES-to-2D conversion pass only when a fresh frame arrived; the
     *         original void return did not match that JNI signature.
     */
    public boolean update()
    {
        boolean newFrame = false;
        while (frameProcessedCounter != frameAvailableCounter)
        {
            surfaceTex.updateTexImage();
            frameProcessedCounter++;
            newFrame = true;
        }
        return newFrame;
    }

    /**
     * Creates a paused, pre-rolled player rendering into the given GL texture.
     *
     * @param ctx       the owning Activity (passed as Object across JNI)
     * @param pathName  path or URL of the video
     * @param texHandle name of a GL_TEXTURE_EXTERNAL_OES texture to decode into
     */
    public SurfaceTexturePlayer(java.lang.Object ctx, String pathName, int texHandle)
    {
        activity = (Activity) ctx;
        activity.setVolumeControlStream(AudioManager.STREAM_MUSIC);
        Log.d(logTag, pathName);

        // Wrap the caller's external texture in a SurfaceTexture/Surface pair
        // so MediaPlayer can decode directly into it.
        surfaceTex = new SurfaceTexture(texHandle);
        surfaceTex.setOnFrameAvailableListener(this);
        videoSurface = new Surface(surfaceTex);

        if (mediaPlayer != null)
        {
            mediaPlayer.release();
        }
        mediaPlayer = new MediaPlayer();
        mediaPlayer.setOnVideoSizeChangedListener(this);
        mediaPlayer.setOnCompletionListener(this);
        // Bug fix: the class implements OnErrorListener but the original never
        // registered it, so onError() was unreachable.
        mediaPlayer.setOnErrorListener(this);
        mediaPlayer.setSurface(videoSurface);

        try {
            Log.v(logTag, "mediaPlayer.setDataSource()");
            mediaPlayer.setDataSource(pathName);
        } catch (IOException t) {
            Log.e(logTag, "mediaPlayer.setDataSource failed: " + t.getMessage());
        }
        try {
            Log.v(logTag, "mediaPlayer.prepare");
            mediaPlayer.prepare();
        } catch (IOException t) {
            Log.e(logTag, "mediaPlayer.prepare failed:" + t.getMessage());
        }

        mediaPlayer.setLooping(true);
        // Muted pre-roll: start+pause forces the first frame to decode so the
        // texture has valid contents before play() is ever called.
        mediaPlayer.setVolume(0.0f, 0.0f);
        mediaPlayer.start();
        mediaPlayer.pause();

        populateDimensions(pathName);
    }

    /** Unmutes and starts (or resumes) playback. */
    public void play()
    {
        try
        {
            Log.v(logTag, "mediaPlayer.start");
            videoFinished = false;
            mediaPlayer.setVolume(volume, volume);
            mediaPlayer.start();
            activity.setVolumeControlStream(AudioManager.STREAM_MUSIC);
        }
        catch (IllegalStateException ise)
        {
            Log.d(logTag, "mediaPlayer.start(): Caught illegalStateException: " + ise.toString());
        }
    }

    /** Decoder callback (looper thread): a new frame is ready for updateTexImage(). */
    public void onFrameAvailable(SurfaceTexture surfaceTexture)
    {
        frameAvailableCounter++;
    }

    // NOTE(review): with setLooping(true) this callback is normally not fired
    // by the framework; it is kept as a defensive stop-and-rewind.
    @Override
    public void onCompletion(MediaPlayer mediaPlayer)
    {
        Log.d(logTag, "VIDEO COMPLETED");
        videoFinished = true;
        mediaPlayer.pause();
        mediaPlayer.seekTo(0);
    }

    /** Returning false lets the framework fall through to onCompletion(). */
    @Override
    public boolean onError(MediaPlayer mediaPlayer, int i, int i1)
    {
        Log.e(logTag, "VIDEO ON ERROR");
        return false;
    }

    @Override
    public void onVideoSizeChanged(MediaPlayer mediaPlayer, int i, int i1)
    {
        Log.d(logTag, "ON VIDEO SIZE CHANGED");
    }

    /** Pauses playback if currently playing; swallows late-lifecycle state errors. */
    public void pause()
    {
        try
        {
            if (mediaPlayer.isPlaying())
            {
                mediaPlayer.pause();
            }
        }
        catch (IllegalStateException ise)
        {
            // Fixed log text: this is pause(), not start().
            Log.d(logTag, "mediaPlayer.pause(): Caught illegalStateException: " + ise.toString());
        }
    }

    /** Releases player and surface resources. Safe to call more than once. */
    public void release()
    {
        if (mediaPlayer != null)
        {
            mediaPlayer.release();
            mediaPlayer = null;
        }
        if (videoSurface != null)
        {
            videoSurface.release();
            videoSurface = null;
        }
        if (surfaceTex != null)
        {
            surfaceTex.release();
            surfaceTex = null;
        }
    }
}
// Converts a SamplerExternalOES texture from MediaPlayer to a plain
// GL_TEXTURE_2D "ShaderToy input" by drawing it through a conversion shader
// into an FBO. Native counterpart of the Java SurfaceTexturePlayer, driven
// via JNI wrapper helpers (JavaObject/JavaClass/loadClass, defined elsewhere).
class VideoInput : public ShaderToyInput
{
private:
gl::Program convertProg; // OES -> 2D conversion shader program
gl::Texture externalTex; // GL_TEXTURE_EXTERNAL_OES texture the SurfaceTexture decodes into
gl::Framebuffer convertFBO; // renders into 'texture' (inherited member, presumably from ShaderToyInput -- confirm)
JavaObject surfaceTexturePlayerObject; // JNI handle to the Java-side player instance
struct android_app * app; // native-activity glue; supplies the Activity object for construction
JNIEnv* env; // JNI environment used for all calls (assumes single-thread use -- TODO confirm)
public:
static const std::string videoConvertVert; // conversion vertex shader source (defined elsewhere)
static const std::string videoConvertFrag; // conversion fragment shader source (defined elsewhere)
// The converted result sampled by consumers is an ordinary 2D texture,
// not the OES external one.
virtual GLenum textureTarget()
{
return GL_TEXTURE_2D;
}
// Builds the conversion program, creates the Java player for 'url', sizes the
// destination texture from the Java-side width/height fields, and wires the FBO.
VideoInput(struct android_app * appIn, JNIEnv* envIn, const std::string& url)
: convertProg(Virtuoso::GL::Program({
Virtuoso::GL::Shader(GL_VERTEX_SHADER,
videoConvertVert),
Virtuoso::GL::Shader(GL_FRAGMENT_SHADER,
videoConvertFrag)
})),
app(appIn),
env(envIn)
{
// Texture unit 8 is reserved for the external video texture; must match
// the "vidTex" sampler uniform set below.
glActiveTexture(GL_TEXTURE8);
glBindTexture(GL_TEXTURE_EXTERNAL_OES, externalTex.name);
Virtuoso::GL::setFilterBilinear(externalTex);
prepareVideo(url, externalTex);
// Dimensions were populated by the Java constructor via populateDimensions().
width = surfaceTexturePlayerObject.getField<jint>("width");
height = surfaceTexturePlayerObject.getField<jint>("height");
texture.Image2D(GL_TEXTURE_2D, 0, GL_RGB, width,height, 0, GL_RGB, GL_UNSIGNED_BYTE, NULL);
convertFBO.Texture(GL_COLOR_ATTACHMENT0, texture, 0);
convertProg.Uniform("iResolution", (float)width, (float)height);
convertProg.Uniform("vidTex", 8);
}
// Pauses Java-side playback (e.g. when the app loses focus).
void onPaused()
{
surfaceTexturePlayerObject.call<void>("pause");
}
// End-of-video hook; intentionally empty (Java side loops the video).
void onFinished()
{
}
// Instantiates com.virtuosoengine.SurfaceTexturePlayer(activity, file, texName).
// NOTE(review): the jstring local ref from NewStringUTF is never deleted --
// a per-call local-ref leak unless invoked inside a managed JNI frame; confirm.
void prepareVideo(const std::string& file, const gl::Texture& tex)
{
JavaClass surfaceTexturePlayerClass = loadClass(app, env, "com/virtuosoengine/SurfaceTexturePlayer");
jstring fileStrJava = env->NewStringUTF(file.c_str());
surfaceTexturePlayerObject = surfaceTexturePlayerClass.construct<jobject, jstring, int>(app->activity->clazz, fileStrJava, tex.name);
};
// Starts (or resumes) Java-side playback.
void start()
{
surfaceTexturePlayerObject.call<void>("play");
}
// Draws the external OES texture through convertProg into 'texture' via the
// FBO, restoring the previously bound framebuffer afterwards.
void convertFrame()
{
convertProg.Use();
GLint oldFB = gl::Get<GLint>(GL_FRAMEBUFFER_BINDING); // save caller's FBO binding
convertFBO.Bind(GL_FRAMEBUFFER);
glViewport (0, 0, width, height);
glClear(GL_COLOR_BUFFER_BIT); ///\todo don't need to bind a depth right ?
glDrawArrays(GL_TRIANGLES, 0, 6); // full-screen quad as two triangles
glBindFramebuffer(GL_FRAMEBUFFER, oldFB);
texture.GenerateMipmap(); // keep the mip chain valid for the sampled 2D texture
}
// Per-frame: latch any new decoder frames (Java update()) and reconvert,
// then fire onFinished() once the Java side reports completion.
// NOTE(review): this expects Java update() to return boolean (jboolean);
// verify the Java signature matches -- the gist originally declared it
// as 'public void update()', which would fail JNI method lookup.
void update()
{
if (surfaceTexturePlayerObject.call<jboolean>("update"))
{
convertFrame();
}
if (surfaceTexturePlayerObject.getField<jboolean>("videoFinished"))
{
onFinished();
}
}
};
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment