Skip to content

Instantly share code, notes, and snippets.

@entrypointkr
Last active April 4, 2022 08:24
Show Gist options
  • Save entrypointkr/12a575e3d0b48526394b2c67c5ac1710 to your computer and use it in GitHub Desktop.
Minecraft draw video frame using JavaCV, OpenGL
import org.bytedeco.javacv.FFmpegFrameGrabber;
import org.bytedeco.javacv.Frame;
import org.bytedeco.javacv.FrameGrabber;
import org.lwjgl.opengl.GL11;
import org.lwjgl.opengl.GL12;
import org.lwjgl.opengl.GL30;
import javax.sound.sampled.*;
import java.awt.image.*;
import java.io.File;
import java.nio.ByteBuffer;
import java.nio.ShortBuffer;
import java.util.Optional;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
public class JavaCVPlayer {
    /**
     * Starts playback of {@code file} on background executors: {@code playExecutor}
     * runs the FFmpeg grab loop, {@code imageExecutor} hands each video frame to
     * {@code f} and paces it against the playback clock, and {@code audioExecutor}
     * writes decoded PCM to a {@link SourceDataLine}.
     *
     * @param f             consumer invoked (on {@code imageExecutor}) with a cloned video frame
     * @param file          media file to open with FFmpeg
     * @param runR          shared run flag; checked every iteration so the caller can stop
     *                      playback by clearing it, and cleared by this method when playback
     *                      ends — normally or on error
     * @param playExecutor  executor that owns the grab/decode loop
     * @param imageExecutor executor on which video frames are delivered to {@code f}
     * @param audioExecutor executor on which audio bytes are written to the sound line
     */
    public static void startGrabber(
            Consumer<Frame> f,
            File file,
            AtomicBoolean runR,
            ExecutorService playExecutor,
            ExecutorService imageExecutor,
            ExecutorService audioExecutor
    ) {
        playExecutor.submit(() -> {
            try (FFmpegFrameGrabber grabber = new FFmpegFrameGrabber(file)) {
                grabber.start();
                SourceDataLine soundLine = openAudioLine(grabber).orElse(null);
                try {
                    if (soundLine != null) {
                        soundLine.start();
                    }
                    // Never decode more than one second ahead of the playback clock.
                    long maxReadAheadBufferMicros = 1000L * 1000L;
                    AtomicLong lastShowTimeR = new AtomicLong(-1);
                    AtomicLong lastFrameTimeR = new AtomicLong(-1);
                    Frame mutFrame;
                    while (runR.get() && (mutFrame = grabber.grab()) != null) {
                        if (lastShowTimeR.get() < 0) {
                            // Start the wall-clock reference at the first grabbed frame.
                            lastShowTimeR.set(System.nanoTime());
                        }
                        lastFrameTimeR.set(mutFrame.timestamp);
                        if (mutFrame.image != null) {
                            // Clone: the grabber reuses mutFrame's buffers on the next grab().
                            Frame frame = mutFrame.clone();
                            imageExecutor.submit(() -> {
                                try {
                                    // TODO: close frame once no consumer retains it
                                    f.accept(frame);
                                    // Sleep off the time remaining until this frame is due.
                                    // Frame timestamps are microseconds; sleep takes millis.
                                    long delay = frame.timestamp - getTimeDiff(lastShowTimeR.get(), soundLine);
                                    if (runR.get()) {
                                        try {
                                            Thread.sleep(delay > 0 ? delay / 1000L : 0);
                                        } catch (InterruptedException e) {
                                            Thread.currentThread().interrupt();
                                        }
                                    }
                                } catch (Throwable throwable) {
                                    throwable.printStackTrace();
                                }
                            });
                        } else if (mutFrame.samples != null && soundLine != null) {
                            ShortBuffer samples = (ShortBuffer) mutFrame.samples[0];
                            samples.rewind();
                            // BUG FIX: copy only the valid samples (up to limit), not the full
                            // capacity — trailing capacity may hold stale data from a previous
                            // frame. The bulk asShortBuffer().put(...) writes big-endian bytes,
                            // matching the big-endian AudioFormat opened in openAudioLine.
                            ByteBuffer outBuffer = ByteBuffer.allocate(samples.remaining() * 2);
                            outBuffer.asShortBuffer().put(samples);
                            audioExecutor.submit(() ->
                                    soundLine.write(outBuffer.array(), 0, outBuffer.capacity()));
                        }
                        // Throttle the grab loop when decoding runs too far ahead.
                        long diff = mutFrame.timestamp - getTimeDiff(lastShowTimeR.get(), soundLine);
                        if (diff > maxReadAheadBufferMicros) {
                            Thread.sleep((diff - maxReadAheadBufferMicros) / 1000L);
                        }
                    }
                    // Wait until the last queued frame has had its display time
                    // (plus one frame interval) before tearing everything down.
                    if (runR.get() && !Thread.interrupted()) {
                        long delay = (lastFrameTimeR.get() - getTimeDiff(lastShowTimeR.get(), soundLine)) / 1000L
                                + Math.round(1 / grabber.getFrameRate() * 1000L);
                        Thread.sleep(Math.max(0, delay));
                    }
                    if (soundLine != null) {
                        // Let buffered audio play out rather than cutting it off at close.
                        soundLine.drain();
                    }
                } finally {
                    // BUG FIX: close the line even when the loop throws (it leaked before).
                    if (soundLine != null) {
                        soundLine.close();
                    }
                }
            } catch (Throwable ex) {
                ex.printStackTrace();
            } finally {
                // BUG FIX: always clear the run flag so watchers (e.g. VideoScreen.tick)
                // see that playback ended even after an error.
                runR.set(false);
            }
        });
    }

    /**
     * Opens (but does not start) an audio output line for the grabber's stream:
     * 16-bit signed big-endian PCM at the stream's sample rate and channel count.
     *
     * @return the opened line, or empty when the media has no audio channels or
     *         no matching line is available
     */
    public static Optional<SourceDataLine> openAudioLine(FrameGrabber grabber) {
        if (grabber.getAudioChannels() > 0) {
            // Big-endian, to match the byte order produced in startGrabber.
            AudioFormat format = new AudioFormat(grabber.getSampleRate(), 16, grabber.getAudioChannels(), true, true);
            DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
            try {
                SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
                line.open(format);
                return Optional.of(line);
            } catch (LineUnavailableException e) {
                return Optional.empty();
            }
        } else {
            return Optional.empty();
        }
    }

    /**
     * Maps a BufferedImage's raster storage type to the matching GL pixel type,
     * or -1 when the buffer class is not recognized.
     */
    public static int getTypeFromImage(BufferedImage image) {
        DataBuffer buffer = image.getRaster().getDataBuffer();
        if (buffer instanceof DataBufferByte) {
            return GL11.GL_UNSIGNED_BYTE;
        } else if (buffer instanceof DataBufferDouble) {
            return GL11.GL_DOUBLE;
        } else if (buffer instanceof DataBufferFloat) {
            return GL11.GL_FLOAT;
        } else if (buffer instanceof DataBufferInt) {
            return GL11.GL_INT;
        } else if (buffer instanceof DataBufferShort) {
            return GL11.GL_SHORT;
        } else if (buffer instanceof DataBufferUShort) {
            return GL11.GL_UNSIGNED_SHORT;
        } else {
            return -1;
        }
    }

    /**
     * Maps a BufferedImage's band count to a GL pixel format, or -1 for
     * unsupported band counts.
     */
    public static int getFormatFromImage(BufferedImage image) {
        switch (image.getSampleModel().getNumBands()) {
            case 1:
                return GL11.GL_LUMINANCE;
            case 2:
                return GL30.GL_RG;
            case 3:
                return GL11.GL_RGB;
            case 4:
                return GL11.GL_RGBA;
            default:
                return -1;
        }
    }

    /**
     * Maps a JavaCV frame's sample depth to the matching GL pixel type,
     * or -1 for unknown depths.
     */
    public static int getTypeFromFrame(Frame frame) {
        switch (frame.imageDepth) {
            case Frame.DEPTH_BYTE:
                return GL11.GL_BYTE;
            case Frame.DEPTH_UBYTE:
                return GL11.GL_UNSIGNED_BYTE;
            case Frame.DEPTH_SHORT:
                return GL11.GL_SHORT;
            case Frame.DEPTH_USHORT:
                return GL11.GL_UNSIGNED_SHORT;
            case Frame.DEPTH_INT:
                return GL11.GL_INT;
            case Frame.DEPTH_FLOAT:
                return GL11.GL_FLOAT;
            case Frame.DEPTH_DOUBLE:
                return GL11.GL_DOUBLE;
            default:
                return -1;
        }
    }

    /**
     * Maps a JavaCV frame's channel count to a GL pixel format, or -1 for
     * unsupported counts. FFmpeg delivers blue-first channel order, hence the
     * BGR/BGRA constants for 3 and 4 channels.
     */
    public static int getFormatFromFrame(Frame frame) {
        switch (frame.imageChannels) {
            case 1:
                return GL11.GL_LUMINANCE;
            case 2:
                return GL30.GL_RG;
            case 3:
                return GL12.GL_BGR;
            case 4:
                // BUG FIX: was GL12.GL_RGBA, which does not exist in LWJGL's GL12
                // (GL_RGBA lives in GL11); the GL12 four-channel constant matching
                // the BGR ordering above is GL_BGRA.
                return GL12.GL_BGRA;
            default:
                return -1;
        }
    }

    /**
     * Row length in pixels for GL_UNPACK_ROW_LENGTH: the frame's stride divided
     * by its channel count (falls back to the image width for channel-less frames).
     * NOTE(review): assumes imageStride is measured in buffer elements, so
     * stride/channels yields pixels per row — confirm against JavaCV docs.
     */
    public static int getRowFromFrame(Frame frame) {
        int ch = frame.imageChannels;
        return ch >= 1 ? frame.imageStride / ch : frame.imageWidth;
    }

    /**
     * Current playback clock position in microseconds: the audio line's position
     * when audio is playing (keeps video synced to audio), otherwise wall time
     * elapsed since {@code lastShowTime} (a {@link System#nanoTime()} value).
     */
    public static long getTimeDiff(long lastShowTime, SourceDataLine soundLine) {
        return soundLine != null ? soundLine.getMicrosecondPosition() : (System.nanoTime() - lastShowTime) / 1000L;
    }
}
import com.mojang.blaze3d.matrix.MatrixStack;
import com.mojang.blaze3d.platform.GlStateManager;
import net.minecraft.client.renderer.BufferBuilder;
import net.minecraft.client.renderer.Tessellator;
import net.minecraft.client.renderer.WorldVertexBufferUploader;
import net.minecraft.client.renderer.vertex.DefaultVertexFormats;
import net.minecraft.util.math.vector.Matrix4f;
import org.bytedeco.javacv.Frame;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicInteger;
import static org.lwjgl.opengl.GL11.*;
public class OpenGLVideo {
    /**
     * Uploads the given video frame into the texture held by {@code textureR}
     * (allocating the GL texture on first use) and draws it as a quad, uniformly
     * scaled and centered (letterboxed) inside the {@code width}x{@code height}
     * area. With a null frame, the previously uploaded texture is drawn over the
     * full area instead.
     *
     * @param frame       decoded video frame to upload, or null to reuse the last texture
     * @param width       available width in GUI units
     * @param height      available height in GUI units
     * @param matrixStack current transform stack
     * @param textureR    holder for the GL texture name; negative means "not created yet"
     */
    public static void draw(Frame frame, int width, int height, MatrixStack matrixStack, AtomicInteger textureR) {
        int texture = textureR.get();
        if (texture < 0) {
            // Lazily create the texture object on first draw.
            int newTexture = glGenTextures();
            textureR.set(newTexture);
            texture = newTexture;
        }
        Matrix4f matrix = matrixStack.getLast().getMatrix();
        // Uniform scale so the frame fits inside width x height without distortion.
        double widthZoom = frame != null ? (double) width / frame.imageWidth : 1.0;
        double heightZoom = frame != null ? (double) height / frame.imageHeight : 1.0;
        double zoom = Math.min(widthZoom, heightZoom);
        int videoWidth = frame != null ? (int) (frame.imageWidth * zoom) : width;
        int videoHeight = frame != null ? (int) (frame.imageHeight * zoom) : height;
        // Offsets that center the scaled frame inside the available area.
        int centerIncX = (int) ((double) Math.max(width - videoWidth, 0) / 2);
        int centerIncY = (int) ((double) Math.max(height - videoHeight, 0) / 2);
        // FIX: the original named these min/max backwards (its "minX" was the
        // right edge). These are the quad's edges in screen coordinates.
        int left = centerIncX;
        int top = centerIncY;
        int right = videoWidth + centerIncX;
        int bottom = videoHeight + centerIncY;
        BufferBuilder buffer = Tessellator.getInstance().getBuffer();
        GlStateManager.bindTexture(texture);
        // Upload this frame's pixels into the bound texture.
        if (frame != null) {
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
            glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
            ByteBuffer pixels = ((ByteBuffer) frame.image[0]);
            // Frame rows may be padded; tell GL the true row length in pixels.
            GlStateManager.pixelStore(GL_UNPACK_ROW_LENGTH, JavaCVPlayer.getRowFromFrame(frame));
            GlStateManager.pixelStore(GL_UNPACK_SKIP_PIXELS, 0);
            GlStateManager.pixelStore(GL_UNPACK_SKIP_ROWS, 0);
            GlStateManager.pixelStore(GL_UNPACK_ALIGNMENT, 1);
            int format = JavaCVPlayer.getFormatFromFrame(frame);
            // GL_UNSIGNED_BYTE matches the ByteBuffer cast above.
            glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, frame.imageWidth, frame.imageHeight, 0, format, GL_UNSIGNED_BYTE, pixels);
            // BUG FIX: restore the GL default unpack state so later texture
            // uploads elsewhere in the game are not corrupted by our settings.
            GlStateManager.pixelStore(GL_UNPACK_ROW_LENGTH, 0);
            GlStateManager.pixelStore(GL_UNPACK_ALIGNMENT, 4);
        }
        // Textured quad; vertex order matches the original implementation.
        buffer.begin(GL_QUADS, DefaultVertexFormats.POSITION_TEX);
        buffer.pos(matrix, (float) right, (float) top, 0f).tex(1f, 0f).endVertex();
        buffer.pos(matrix, (float) left, (float) top, 0.0F).tex(0f, 0f).endVertex();
        buffer.pos(matrix, (float) left, (float) bottom, 0.0F).tex(0f, 1f).endVertex();
        buffer.pos(matrix, (float) right, (float) bottom, 0.0F).tex(1f, 1f).endVertex();
        buffer.finishDrawing();
        WorldVertexBufferUploader.draw(buffer);
    }
}
import com.mojang.blaze3d.matrix.MatrixStack;
import net.minecraft.client.gui.screen.Screen;
import net.minecraft.util.text.ITextComponent;
import org.bytedeco.javacv.Frame;
import java.awt.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
public class VideoScreen extends Screen {
    // BUG FIX: volatile — this field is written by the playback executor thread
    // and read by the render thread; without volatile the render thread has no
    // visibility guarantee and may keep showing a stale frame.
    public volatile Frame frame = null;
    // Shared run flag: cleared here on close so the playback loop stops, and
    // cleared by the player when playback ends so tick() closes this screen.
    public final AtomicBoolean runR = new AtomicBoolean(true);
    // GL texture name used by OpenGLVideo.draw; -1 = not yet allocated.
    public static final AtomicInteger textureR = new AtomicInteger(-1);

    public VideoScreen(ITextComponent titleIn) {
        super(titleIn);
    }

    /** Closes the screen on Escape (GLFW key code 256); consumes every key press. */
    @Override
    public boolean keyPressed(int keyCode, int scanCode, int modifiers) {
        if (keyCode == 256) {
            getMinecraft().displayGuiScreen(null);
        }
        return true;
    }

    @Override
    public void render(MatrixStack matrixStack, int mouseX, int mouseY, float partialTicks) {
        // Black backdrop, then the current video frame letterboxed on top.
        fill(matrixStack, 0, 0, width, height, Color.BLACK.getRGB());
        OpenGLVideo.draw(frame, width, height, matrixStack, textureR);
        super.render(matrixStack, mouseX, mouseY, partialTicks);
    }

    @Override
    public void onClose() {
        // Stop the playback loop when the screen is dismissed.
        runR.set(false);
    }

    @Override
    public void tick() {
        // Playback finished (or failed) — close the screen.
        if (!runR.get()) {
            getMinecraft().displayGuiScreen(null);
        }
    }
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment