@duangsuse
Forked from Trumeet/JPlayer.java
Last active January 18, 2021 18:11
Linux Terminal has the capability of displaying 8-bit colour, so why not play something on it?
mkdir -p src/main/java; cp *.java src/main/java
python mvn.py trumeet:asciiart:1.0-SNAPSHOT org.jcodec:-,--javase:0.2.3 >pom.xml
mvn install
you-get https://www.bilibili.com/video/av706
mv *Bad\ Apple*.mp4 BadApple.mp4
export LINES=$LINES COLUMNS=$COLUMNS
echo $COLUMNS x $LINES
mvn exec:java -Dexec.mainClass=JPlayer1 -Dexec.args=BadApple.mp4
mvn exec:java -Dexec.mainClass=JPlayer -Dexec.args='BadApple_con.mp4 BadApple.mp4.wav'
# compile C++ OpenCV ver.
head -n 1 asciiv.cpp|tr -d '/' |sh
cvlc BadApple.mp4.wav &>/dev/null & ./a.out BadApple_con.mp4
//g++ asciiv.cpp `pkgconf opencv --libs`
#include <iostream>
#include <string>
#include <cstdlib> //getenv, atoi
#include <cstring> //strlen
#include <chrono>
#include <thread> //sleep_for
#include <opencv2/core/mat.hpp>
#include <opencv2/imgproc/imgproc.hpp> //cvtColor, resize
#include <opencv2/videoio.hpp>
#include <opencv2/highgui/highgui.hpp>
#ifdef __unix
# include <sys/ioctl.h> //getwinsz
#endif
using string = std::string;
using Clock = std::chrono::high_resolution_clock;
using Time = std::chrono::milliseconds;
using Mat = cv::Mat;
typedef uint8_t Pix;
typedef cv::Point3_<Pix> Pix3; // R[GB] channel color used to store fg/bg ANSI codes
cv::Size whCon;
const char ANSI_E[] = "\x1b[";
static inline void writePixel(std::ostream& os, int gray) {
int cps[2];
#define cp(fg,bg) cps[0]=fg, cps[1]=bg; break;
switch (gray) {
case 0 ... 63: cp(30, 40)
case 64 ... 126: cp(90, 100)
case 127 ... 189: cp(37, 47)
case 190 ... 255: cp(97, 107)
}
#undef cp
os<<ANSI_E<<cps[0]<<"m"<<ANSI_E<<cps[1]<<"m ";
}
static inline Mat pipe(Mat m) {
Mat mBW(m.size(), CV_8U);
Mat m1(whCon, mBW.type());
cv::cvtColor(m, mBW, cv::COLOR_BGR2GRAY);
cv::resize(mBW, m1, whCon); mBW.release();
return m1;
}
void writeFrame1(Mat m) {
Mat m1 = pipe(m);
for (int i=0; i!=m1.rows; i++) {
Pix *p=m1.ptr<Pix>(i,0), *p1=p+m1.cols;
for (; p!=p1; p++) { writePixel(std::cout, *p); }
std::cout<<std::endl;
}
m1.release();
}
static int colorFix = 0;
void writeFrame2C(Mat out) {
imshow("render", out); if (cv::waitKey(1) == 'p') std::cin >> colorFix;
for (int i=0; i!=out.rows; i++) {
auto p=out.ptr<Pix3>(i,0), p1=p+out.cols;
std::cout<<ANSI_E<<i<<'H';
for (; p!=p1; p++) { std::cout<<ANSI_E<<colorFix+p->y<<"m"<<ANSI_E<<colorFix+p->x<<"m "; }
//std::cout<<std::endl;
}
out.release();
}
cv::VideoWriter vwriter;
void writeFrame2(Mat m) {
Mat m1 = pipe(m);
Mat out(m1.size(), CV_8UC3);
#define cp(fg,bg) pixel.x=fg; pixel.y=bg; break;
out.forEach<Pix3>([&](Pix3& pixel, const int yx[]) -> void {
pixel.z = 0; // xyz, BGR order
auto px = m1.at<Pix>(yx[0], yx[1]);
switch (px) {
case 0 ... 63: cp(30, 40)
case 64 ... 126: cp(90, 100)
case 127 ... 189: cp(37, 47)
case 190 ... 255: cp(97, 107)
}
});
#undef cp
m1.release();
vwriter.write(out);
writeFrame2C(out);
}
typedef const char* cstr;
int ienv(cstr k, int deft = 0) { auto s = std::getenv(k); return (s!=nullptr)? atoi(s) : deft; }
int envFourcc() { cstr s = std::getenv("FOURCC"); if (s==nullptr) s="DIB "; return (strlen(s)==0)? -1 : cv::VideoWriter::fourcc(s[0],s[1],s[2],s[3]); }
void playAA(string fp) {
cv::VideoCapture vid(fp);
cv::namedWindow("frame", cv::WINDOW_AUTOSIZE);
double fps = vid.get(cv::CAP_PROP_FPS);
#define seek(d) vid.set(cv::CAP_PROP_POS_MSEC, vid.get(cv::CAP_PROP_POS_MSEC)+d*5000)
Time ft_ms{static_cast<int>(1000.0/fps)};
// begin refreshing
int mech = ienv("curs");
auto writeFrame = (mech==1)? writeFrame2 : (mech==2)? writeFrame2C : writeFrame1;
switch (mech) { case 1: vwriter.open("cache.avi", envFourcc(), fps, whCon); /*fallthrough: mech 1 also previews via "render"*/ case 2: cv::namedWindow("render", cv::WINDOW_NORMAL); }
Mat img;
Clock::time_point t, t1;
while (vid.read(img)) {
t = Clock::now();
cv::imshow("frame", img);
switch (cv::waitKey(1)) { case '=': seek(1); break; case '-': seek(-1); break; case 'q': goto out; }
writeFrame(img); img.release();
t1 = Clock::now();
auto dt = (t1 - t);
if (dt < ft_ms) {
auto delay = ft_ms - dt;
std::this_thread::sleep_for(std::chrono::duration_cast<Time>(delay));
}
}
out: vid.release(); vwriter.release();
#undef seek
}
int main(int argc, char* argv[]) {
#ifdef __unix
struct winsize con; ioctl(0, TIOCGWINSZ, &con);
whCon = cv::Size(con.ws_col, con.ws_row);
#else
whCon = cv::Size(ienv("COLUMNS"), ienv("LINES"));
#endif
std::cout<<"Geo: "<<whCon<<std::endl;
for (int i=1; i<argc; i++) playAA(string(argv[i]));
}
import org.jcodec.api.FrameGrab;
import org.jcodec.common.io.NIOUtils;
import org.jcodec.common.model.Picture;
import org.jcodec.scale.AWTUtil;
import java.awt.image.BufferedImage;
import java.io.*;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
/**
* Linux Terminal has the capability of displaying 8-bit colour, so why not play something on it?
*
* Libraries:
* <pre>
* implementation 'org.jcodec:jcodec:0.2.5'
* implementation 'org.jcodec:jcodec-javase:0.2.5'
* </pre>
*
* Pre-scaled video required. Separate wav audio required. Scale the video so that the size will not exceed $COLUMNS pixels in width by $LINES pixels in height.
* $COLUMNS and $LINES variables required.
* WTFPL
*
* https://gist.github.com/Trumeet
*
* @author YuutaW
*/
public class JPlayer {
public static void main(String... args) throws Throwable {
final int mspf = 1000 / 30;
final int columns = Integer.parseInt(System.getenv("COLUMNS"));
final int lines = Integer.parseInt(System.getenv("LINES"));
final int speed = Integer.parseInt(System.getenv("SPEED"));
final FrameGrab grab = FrameGrab.createFrameGrab(NIOUtils.readableChannel(new File(args[0])));
int[][] framebuffer = new int[columns][lines];
AudioInputStream audioIn = AudioSystem.getAudioInputStream(new File(args[1]));
Clip clip = AudioSystem.getClip();
clip.open(audioIn);
System.out.print("\033[2J");
System.out.print("Press any key to start...");
System.in.read();
System.out.print("\033[?25l");
Runtime.getRuntime().addShutdownHook(new Thread(() -> {
System.out.print("\033[?25h\u001B[0m");
}));
final BufferedWriter writer = new BufferedWriter(new OutputStreamWriter(System.out),
1 << 16); // note: the original "2 ^ 32" is XOR in Java (= 34 bytes), not a power; use a real 64 KiB buffer
clip.start();
while(true) {
final long start = System.currentTimeMillis();
final Picture picture = grab.getNativeFrame();
if(picture == null) break;
final BufferedImage image = AWTUtil.toBufferedImage(picture);
for (int row = 0; row < image.getHeight(); row ++) {
for (int column = 0; column < image.getWidth(); column ++) {
final int gray = image.getRGB(column, row) & 0xFF;
if(framebuffer[column][row] != gray) {
render(gray, row, column, writer);
}
framebuffer[column][row] = gray;
}
}
writer.flush();
final long took = System.currentTimeMillis() - start;
if(took <= mspf) {
Thread.sleep((mspf - took) / speed);
}
}
}
private static void render(final int gray, final int line, final int column,
Writer writer) throws IOException {
int foregroundColorCode;
int backgroundColorCode;
if(gray <= 63) {
// Black.
foregroundColorCode = 30;
backgroundColorCode = 40;
} else if(gray <= 126) {
// Dark Gray
foregroundColorCode = 90;
backgroundColorCode = 100;
} else if(gray <= 189) {
// Light Gray
foregroundColorCode = 37;
backgroundColorCode = 47;
} else {
// White
foregroundColorCode = 97;
backgroundColorCode = 107;
}
// We don't use format here, since it's slow.
// '[%d;%dH[%dm[%dm '
final StringBuilder stringBuilder = new StringBuilder()
.append("\033[")
.append(line)
.append(";")
.append(column)
.append("H\u001B[")
.append(foregroundColorCode)
.append("m\u001b[")
.append(backgroundColorCode)
.append("m ");
writer.write(stringBuilder.toString());
}
}
import java.io.File;
import java.awt.image.BufferedImage;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.Clip;
import java.io.Writer;
import java.io.OutputStreamWriter;
import java.io.BufferedOutputStream; // faster than -Writer
import java.io.IOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.concurrent.atomic.AtomicInteger; // ringbuffer
import java.util.concurrent.BlockingQueue; // multi-thread
import java.util.concurrent.ArrayBlockingQueue;
import static java.lang.System.out;
import static org.jcodec.common.io.NIOUtils.readableChannel;
import static java.lang.System.nanoTime;
import org.jcodec.api.FrameGrab;
import org.jcodec.common.DemuxerTrackMeta;
import org.jcodec.common.model.Picture;
import static org.jcodec.scale.AWTUtil.toBufferedImage;
final public class JPlayer1 {
static private Utils u = Utils.INSTANCE;
enum Mechanism { Buffered, Posited, Sequed, SequedRst }
static public void main(String... args) {
JPlayer1 pp = new JPlayer1(u.ienv("NCHUNK", 5*60));
pp.speed = u.ienv("SPEED", 1);
Mechanism m = Mechs.get(System.getenv("MECH"));
if (m != null) pp.mech = m;
for (String fp : args)
try { pp.play(new File(fp)); }
catch (Exception ex) { System.err.println(fp+":"); ex.printStackTrace(); }
}
static private HashMap<String, Mechanism> Mechs = new HashMap<>();
static {
String[] ss = "b p s sr".split(" "); int i=0;
for (Mechanism m : Mechanism.class.getEnumConstants()) Mechs.put(ss[i++], m);
}
public JPlayer1(int chunkSize) {
if (chunkSize > 0) chunks = new String[chunkSize];
else if (chunkSize == -1) rendered = new ArrayBlockingQueue(THREAD_CACHE_NCHUNK);
else if (chunkSize < 0) { System.err.println("ignore cache size <0: "+chunkSize); }
}
static int THREAD_CACHE_NCHUNK = 60*30;
Mechanism mech = Mechanism.Buffered; long speed = 1;
int w, h;
FrameGrab vid; double fps; Clip audDev;
protected int[] framebuffer/*refresh cache*/;
protected String[] chunks;
private BlockingQueue<String> rendered;
protected AtomicInteger iHead = new AtomicInteger(), iTail = new AtomicInteger();
protected String linesep = System.lineSeparator();
protected java.io.Console con = System.console();
void generateAndOpen(File f_vid) {
w = divlow(2, u.ienv("COLUMNS")); h = divlow(2, u.ienv("LINES"));
File f_vid1 = u.genIfMissing(ffArgv(f_vid, "", "-y"/*overwrite*/, "-vf", String.format("scale=%d:%d", w,h)));
File f_aud = u.genIfMissing(ffArgv(f_vid, ".wav"));
try {
vid = FrameGrab.createFrameGrab(readableChannel(f_vid1));
DemuxerTrackMeta md = vid.getVideoTrack().getMeta();
fps = md.getTotalFrames()/md.getTotalDuration();
audDev = AudioSystem.getClip();
audDev.open(AudioSystem.getAudioInputStream(f_aud));
} catch (Exception ex) { throw new RuntimeException("no open", ex); }
if (mech == Mechanism.Buffered) framebuffer = new int[w*h];
con.printf("%s: geo [%dx%d]", f_vid1.getName(), w,h);
}
static private int divlow(int n, int x) { return (x/n)*n; }
static final int MS_NANOS = 1000000;
void play(File f_vid) throws IOException {
generateAndOpen(f_vid);
long ft_nanos = (long)(1000.0*MS_NANOS / fps);
con.printf(", %f fps =%dnanos, mechanism:%s cache %d, %dx", fps, ft_nanos, mech, (chunks!=null)? chunks.length : 0, speed);
out.println();
java.io.OutputStream outs = out;//new BufferedOutputStream(out, (2+4)*w*h);
Writer o = new OutputStreamWriter(outs)/*buffering not necessary*/;
playIntro(o);
long t0 = nanoTime();
if (chunks != null) generateChunk();
if (rendered != null) {
new Thread(() -> {
try { while (true) rendered.put(renderToString(nextFrame())); }
catch (InterruptedException|NullPointerException _) { while (!rendered.isEmpty()); exitApp(); }
}, "render").start();
}
audDev.start();
try {
if (rendered == null) while (true) { //< MAIN draw loop
if (chunks != null) o.write(chunks[incPtr(iHead)]);
else onFrame(o, nextFrame());
o.flush(); con.flush();
if (chunks != null && samePtr(iHead, iTail)/*full*/) { generateChunk(); if (iHead == null) break; }
long t1 = nanoTime();
long dt = (t1 - t0);
if (dt < ft_nanos) {
long delay = (ft_nanos - dt) / speed;
long start = nanoTime();
do { con.flush(); if (!samePtr(iHead, iTail)) generateChunk(1); } while (nanoTime() - start < delay);
t0 = t1 + delay;
}
else { t0 = t1; } // count extra time to next frame dt.
}
else while (true) { // async
o.write(rendered.take());
long t1 = nanoTime();
long dt = (t1 - t0);
if (dt < ft_nanos) {
long delay = (ft_nanos - dt) / speed; //: any code reuse?
Thread.sleep(delay/MS_NANOS, (int)(delay%MS_NANOS));
t0 = t1 + delay;
} else { t0 = t1; }
}
} catch (IOException|NullPointerException|InterruptedException _) {}
finally { audDev.stop(); audDev.close(); exitApp(); }
}
void exitApp() { System.exit(0); } // temp. solution for Thread break
void generateChunk() { generateChunk(chunks.length); }
void generateChunk(int n) {
for (int i=0; i<n; i++) {
BufferedImage img = nextFrame();
if (img == null) { if (samePtr(iHead, iTail)) iHead = null; return; }
chunks[incPtr(iTail)] = renderToString(img);
}
}
String renderToString(BufferedImage img) {
Writer o = new java.io.StringWriter();
onFrame(o, img); return o.toString();
}
BufferedImage nextFrame() {
try { return toBufferedImage(vid.getNativeFrame()); } catch (IOException _) { throw new NullPointerException("done"); }
}
protected int incPtr(AtomicInteger ai) { return ai.getAndUpdate(i -> (i+1)%chunks.length); }
protected boolean samePtr(AtomicInteger a, AtomicInteger b) { return a.get() == b.get(); }
protected void onFrame(Writer o, BufferedImage img) {
for (int y=0; y<h; y++) for (int x=0; x<w; x++) {
int pix = img.getRGB(x,y);
int p = y*w+x, gray = pix & 0xFF;
switch (mech) {
case SequedRst:
if (x==0) u.ansi(o, y, 'H');
u.ansi(o, cp_fg[gray],'m', cp_bg[gray], "m ");
break;
case Sequed:
if (x==0) try { o.write(linesep); o.flush(); } catch (IOException _) {}
u.ansi(o, cp_fg[gray],'m', cp_bg[gray], "m ");
break;
case Posited:
writePixelTo(o, x,y, gray); break;
case Buffered:
if (framebuffer[p] != gray) { writePixelTo(o, x,y, gray); framebuffer[p] = gray; }
break;
}
}
}
static void writePixelTo(Writer o, int x, int y, int gray) { u.ansi(o, y,";",x, 'H', cp_fg[gray],'m', cp_bg[gray], "m "); }
protected void playIntro(Writer o) {
out.print("Press to start...");
try { System.in.read(); u.ansi(o, 2, 'J'); o.flush(); }
catch (IOException _) { throw new RuntimeException("cancelled stdin"); }
int code = 25/*hide_cursor*/;
u.ansi(o, "?",code, 'l'); //v ok to redo.
Runtime.getRuntime().addShutdownHook(new Thread(() -> u.ansi(o, "?",code, 'h', 0,'m')));
}
static private String FF_I[] = {"ffmpeg", "-i"};
static String[] ffArgv(File f_in, String out_suffix, String... args) {
String[] argv = new String[2+1+args.length+1];
String oname = f_in.getName();
u.arraymv(FF_I, 0, argv, 0); argv[2] = f_in.getPath();
u.arraymv(args, 0, argv, 3); argv[argv.length-1] = out_suffix.isEmpty()? u.mapRSplit(".", oname, 0, s->s+"_con") : oname+out_suffix;
return argv;
}
static private int cp_sw[] = {0,64,127,190,256};
static int[]
cp_fg = u.toPrim(u.rangeSwitch(new Integer[]{30,90,37,97}, cp_sw)),
cp_bg = u.toPrim(u.rangeSwitch(new Integer[]{40,100,47,107}, cp_sw));
static class Utils {
private Utils() {} static public Utils INSTANCE = new Utils();
void ansi(Writer o, Object... cmd) { try { _ansi(o, cmd); } catch (IOException _) {} } // damn Java
private void _ansi(Writer o, Object... cmd) throws IOException {
o.write("\u001b[");
for (int i=0; i<cmd.length; i++) {
Object obj = cmd[i];
if (obj instanceof Character&&i!=cmd.length -1) o.write((char)obj+"\u001b[");
else o.write(obj.toString());
}
}
@FunctionalInterface
interface SFunc { String transform(String s); }
String mapRSplit(String sep, String s, int no, SFunc op) {
int iR = s.lastIndexOf(sep); if (iR == -1) return s;
return mapSSide(s,iR,no, op);
}
String mapSSide(String s, int i, int no, SFunc op) {
String a = s.substring(0,i), b = s.substring(i);
return (no==0)? op.transform(a)+b : a+op.transform(b);
}
int[] toPrim(Integer[] xs) {
int[] xs1 = new int[xs.length]; for (int i=0; i<xs1.length; i++) xs1[i] = xs[i];
return xs1;
}
<T> T[] rangeSwitch(T[] values, int... stop_indexs) {
int[] a = stop_indexs; Arrays.sort(a);
int min=a[0], max=a[a.length-1];
@SuppressWarnings("unchecked")
T[] res = (T[])java.lang.reflect.Array.newInstance(values[0].getClass(), (max-min));
for (int i=0; i<a.length -1/*avoid a[lastIdx+1]*/; i+=1) {
Arrays.fill(res, a[i], a[i+1], values[i]);
}
return res;
}
void arraymv(Object[] src, int p, Object dst, int pd) { System.arraycopy(src, p, dst, pd, src.length); }
int ienv(String name) { return ienv(name, null); }
int ienv(String name, Integer deft) { String s = System.getenv(name); return (s!=null)? Integer.parseInt(s) : (deft!=null)? deft : _ithrow(new IllegalArgumentException("no environ "+name)); }
int _ithrow(RuntimeException ex) throws RuntimeException { throw ex; }
File genIfMissing(String... argv) {
File fOut = new File(argv[argv.length-1]);
if (!fOut.exists()) {
out.println(Arrays.toString(argv));
int rn; try { rn = Runtime.getRuntime().exec(argv).waitFor(); }
catch (IOException|InterruptedException _) { rn = 1; }
if (rn != 0) throw new RuntimeException("failed exec "+Arrays.toString(argv));
}
return fOut;
}
}
}
#!python3
from lxml import etree
from lxml.builder import E
from itertools import chain
def lets(o, *ops): [op(o) for op in ops]
def gavTo(e, g,a,v): [e.append(E(t, s)) for t,s in {"groupId":g, "artifactId":a, "version":v}.items()]
def gavSplit(s): (g,a,v) = s.split(':'); return [(g,a,v)] if ',' not in a else [(g,aa.replace("-", g.rsplit('.')[-1], 1),v) for aa in a.split(',')]
def listE(tag, op, xs): e = E(tag+"s" if tag[-1]!='y' else tag[:-1]+"ies"); [lets(E(tag), lambda ee: op(ee,x), e.append) for x in xs]; return e
def proj(coord, deps, javac="1.8"):
root = E("project", E("modelVersion", "4.0.0")); gavTo(root, *gavSplit(coord)[0]); gav = lambda e,c: gavTo(e,*c)
root.append(E("build", listE("plugin", gav, [("org.apache.maven.plugins", "maven-compiler-plugin", "3.1")] ) ))
root.find("build/plugins")[0].append(E("configuration", E("source",javac), E("target",javac)) )
root.append(listE("dependency", gav, chain.from_iterable(map(gavSplit, deps)) )); return root
from sys import argv
if __name__ == "__main__":
root = proj(argv[1], argv[2:])
print(etree.tostring(root, pretty_print=True).decode())
from re import sub
from lxml import etree, builder
E=builder.E
def noun2(s): return sub("(y|s)$", lambda m: "ie" if m[1]=="y" else "", s)+"s"
def noun2(s): return s[:-1]+"ies" if s[-1]=="y" else s+"s"
for it in "word","words","ivy": print(noun2(it))
def also(op, x): op(x); return x
def listE(tag, op): e=E(noun2(tag)); op(lambda: also(e.append, E(tag)) ); return e
def _s(s): return s.split(" ")
def got(o): return lambda *_: o
def got2nd(op, x): return lambda a: op(a, x)
def _gav(ae,c): e=ae(); [e.append(E(t,v)) for (t,v) in zip(_s("groupId artifactId version"), c)]
def gav(ae,s): c=(g,a,v)=s.split(":"); [_gav(ae, (g,aa.replace("-",g[g.rfind(".")+1:],1),v)) for aa in a.split(",")] if "," in a else _gav(ae,c)
root = E("project")
_gav(got(root), _s("h d c"))
root.append(listE("dependency", got2nd(gav,"org.h:-w,w1:d")))
print(etree.tostring(root, pretty_print=1).decode())
from cv2 import VideoCapture, UMat, namedWindow, imshow, waitKey
from sys import stdout
AE = b"\x1b["
def play(out, vid, ft=b"%d"):
unstop, frame = vid.read()
while unstop:
a = frame
imshow("frame", a); waitKey(1)
(n, m, nChn) = frame.shape
for i in range(n):
for s in AE,ft%i,b"H": out.write(s)
for j in range(m):
c = a[i, j]
for s in AE,ft%c[0],b"m",AE,ft%c[1],b"m ": out.write(s)
unstop, frame = vid.read()
namedWindow("frame", 0b0)
play(stdout.buffer, VideoCapture("cache.avi"))
@duangsuse commented:

🌚 A quick rundown of the technical details.
This revision adds two Python scripts: one is a reusability test that shrinks mvn.py even further, the other can play back the lossless animation recorded by the updated C++ version (which now supports the mech=0,1,2 variable).

The C++ version fixes a bug where the lifetime of unstop=vid.read(/to/frame) let a stale value be used (which broke things when the video ended).
There is not much to say about performance (I didn't even bother optimizing; it is C++ after all 🌝).
mech=0 is the existing mode that directly emits ANSI console escape / newline sequences.
mech=1 is the goto-line, newline-free sequence that also records the output to a file (encoder parameter FOURCC); mech=2 plays back the generated cache.avi (press p in the "render" window, then type a number to set a color-code offset).

The Java version has long had the four MECH modes b, p, s, sr.
buffered uses a framebuffer to render only the changed color cells of each frame and refresh the screen; this matters, and it is usually the fastest (C++: 🌚 it's just that dominant).
posited is buffered with a full-screen redraw every frame.
sequed reaches the next row with newlines, while sequedRst jumps to each row with the [y H escape, which is a bit faster.
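To make the buffered idea concrete, here is a minimal Java sketch of the diff-based refresh as it would sit inside JPlayer1, reusing the existing cp_fg/cp_bg tables; the method name refreshDiff and its parameters are illustrative, not the exact shape of onFrame:

// Sketch only: same idea as onFrame's Buffered case. Cells whose gray level is
// unchanged since the previous frame are skipped; changed cells get an ANSI
// cursor-move and are repainted as a single colored space.
static void refreshDiff(Writer o, int[] framebuffer, int[] gray, int w, int h) throws IOException {
  for (int y = 0; y < h; y++)
    for (int x = 0; x < w; x++) {
      int p = y*w + x;
      if (framebuffer[p] == gray[p]) continue;      // unchanged: nothing to write
      o.write("\u001b[" + y + ";" + x + "H");       // ESC[y;xH jumps straight to the cell
      o.write("\u001b[" + cp_fg[gray[p]] + "m\u001b[" + cp_bg[gray[p]] + "m "); // fg/bg, one space
      framebuffer[p] = gray[p];
    }
  o.flush(); // one flush per frame keeps tearing away without per-cell flushes
}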

@duangsuse commented:

mvn exec:java -Dexec.mainClass=JPlayer1 -Dexec.args=BadApple.mp4 54.07s user 8.04s system 119% cpu 51.925 total
mvn exec:java -Dexec.mainClass=JPlayer 42.69s user 7.59s system 120% cpu 41.633 total

Yuuta wins again. Alright, time for bed; I'll try to come up with a faster version next time 😂
*Note: Yuuta's original buffers through BufferedWriter+StringBuilder with no extra flushes, while we flush many times to avoid frame tearing (and it does help), so in a sense we could still be faster.
The new version supports the MECH (screen refresh mechanism), NCHUNK and SPEED parameters: NCHUNK=0 disables queued rendering, NCHUNK=-1 uses a multithreaded queue, and the default is a ring-buffer queue.
For this benchmark I added SPEED support to the original version and set it large enough that Thread.sleep never runs.
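For reference, an illustrative invocation with these knobs, in the same style as the commands at the top of the gist (the values are arbitrary examples, not recommendations):

COLUMNS=$COLUMNS LINES=$LINES MECH=b NCHUNK=0 SPEED=1 mvn exec:java -Dexec.mainClass=JPlayer1 -Dexec.args=BadApple.mp4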

@duangsuse commented Jan 18, 2021:

Suggested improvements: in JPlayer.java, read SPEED through a getenv("SPEED", deft)-style helper that defaults to 1.
In JPlayer1, SPEED cannot easily become a double because of ienv's int return type, but it could be turned into a per-mille ratio, i.e. delay * speed/1000 (see the sketch below).
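A rough sketch of both suggestions; speedPerMille is an illustrative name that is not in the code, and reading 1000 as realtime is my interpretation of the per-mille idea:

// JPlayer.java: fall back to 1 instead of requiring the SPEED variable.
String speedEnv = System.getenv("SPEED");
final int speed = (speedEnv != null) ? Integer.parseInt(speedEnv) : 1;

// JPlayer1: keep ienv's int type and treat SPEED as a per-mille factor on the
// frame delay, in the delay * speed/1000 shape suggested above (1000 == 1.0x):
long speedPerMille = u.ienv("SPEED", 1000);
long delay = (ft_nanos - dt) * speedPerMille / 1000;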

For the unchecked type conversions, find the corresponding lines and add @SuppressWarnings("unchecked").

https://gist.github.com/duangsuse/64c9ac7a278da48f4b3de3dafd70e9df#file-jplayer1-java-L94

There is a bigger problem here: the exitApp() in the finally block below makes the program exit before the video finishes. Think about an approach, or a concrete fix, that lets the remaining frames in chunks play out (one possible shape is sketched below).
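One possible shape for such a fix, sketched against play()'s locals (o, iHead, iTail, chunks) and the existing helpers; the frame pacing is elided and this is only one way to do it, not the intended answer:

// Sketch: make end-of-video explicit so the ring buffer gets drained before the
// finally block (and exitApp) runs, instead of letting the NullPointerException
// from nextFrame() abort the loop while chunks still holds undisplayed frames.
boolean videoDone = false;
while (true) {
  if (!videoDone && samePtr(iHead, iTail)) {               // buffer empty: try to refill it
    try { generateChunk(); }
    catch (NullPointerException _) { videoDone = true; }   // decoder has no more frames
  }
  if (videoDone && samePtr(iHead, iTail)) break;           // decoded everything and drained
  o.write(chunks[incPtr(iHead)]);
  o.flush();
  // ... keep the existing ft_nanos/speed pacing here ...
}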

(There is also a small exercise: start by adding SPEED environment-variable support to asciiv.cpp.)

I'm too lazy to change these myself; since you are here to learn anyway, the extra effort shouldn't matter. Sorry 😭.
