Created
May 17, 2016 12:23
-
-
Save anonymous/881f64f93903cd21859f08220ef41514 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
From 76becac9ecab97e8a33c34dae865626d2aa1642b Mon Sep 17 00:00:00 2001 | |
From: Marton Balint <cus@passwd.hu> | |
Date: Mon, 16 May 2016 01:41:07 +0100 | |
Subject: [PATCH 1/3] ffplay: convert ffplay to use SDL2 | |
--- | |
configure | 27 ++- | |
ffplay.c | 595 ++++++++++++++++++++++++++------------------------------------ | |
2 files changed, 270 insertions(+), 352 deletions(-) | |
diff --git a/configure b/configure | |
index 2dede36..f30f536 100755 | |
--- a/configure | |
+++ b/configure | |
@@ -2007,6 +2007,7 @@ HAVE_LIST=" | |
perl | |
pod2man | |
sdl | |
+ sdl2 | |
section_data_rel_ro | |
texi2html | |
threads | |
@@ -3075,8 +3076,8 @@ ffmpeg_deps="avcodec avfilter avformat swresample" | |
ffmpeg_select="aformat_filter anull_filter atrim_filter format_filter | |
null_filter | |
setpts_filter trim_filter" | |
-ffplay_deps="avcodec avformat swscale swresample sdl" | |
-ffplay_libs='$sdl_libs' | |
+ffplay_deps="avcodec avformat swscale swresample sdl2" | |
+ffplay_libs='$sdl2_libs' | |
ffplay_select="rdft crop_filter transpose_filter hflip_filter vflip_filter rotate_filter" | |
ffprobe_deps="avcodec avformat" | |
ffserver_deps="avformat fork sarestart" | |
@@ -5767,7 +5768,24 @@ if enabled gcrypt; then | |
fi | |
fi | |
-if ! disabled sdl; then | |
+ | |
+SDL2_CONFIG="${cross_prefix}sdl2-config" | |
+if check_pkg_config sdl2 SDL_events.h SDL_PollEvent; then | |
+ check_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) >= 0x020001" $sdl2_cflags && | |
+ check_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) < 0x020100" $sdl2_cflags && | |
+ enable sdl2 | |
+else | |
+ if "${SDL2_CONFIG}" --version > /dev/null 2>&1; then | |
+ sdl2_cflags=$("${SDL2_CONFIG}" --cflags) | |
+ sdl2_libs=$("${SDL2_CONFIG}" --libs) | |
+ check_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) >= 0x020001" $sdl2_cflags && | |
+ check_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) < 0x020100" $sdl2_cflags && | |
+ enable sdl2 | |
+ fi | |
+fi | |
+enabled sdl2 && add_cflags $sdl2_cflags && add_extralibs $sdl2_libs | |
+ | |
+if ! disabled sdl && ! enabled sdl2; then | |
SDL_CONFIG="${cross_prefix}sdl-config" | |
if check_pkg_config sdl SDL_events.h SDL_PollEvent; then | |
check_cpp_condition SDL.h "(SDL_MAJOR_VERSION<<16 | SDL_MINOR_VERSION<<8 | SDL_PATCHLEVEL) >= 0x010201" $sdl_cflags && | |
@@ -6369,6 +6387,7 @@ echo "network support ${network-no}" | |
echo "threading support ${thread_type-no}" | |
echo "safe bitstream reader ${safe_bitstream_reader-no}" | |
echo "SDL support ${sdl-no}" | |
+echo "SDL2 support ${sdl2-no}" | |
echo "opencl enabled ${opencl-no}" | |
echo "JNI support ${jni-no}" | |
echo "texi2html enabled ${texi2html-no}" | |
@@ -6528,7 +6547,7 @@ HOSTLD_O=$HOSTLD_O | |
TARGET_EXEC=$target_exec $target_exec_args | |
TARGET_PATH=$target_path | |
TARGET_SAMPLES=${target_samples:-\$(SAMPLES)} | |
-CFLAGS-ffplay=$sdl_cflags | |
+CFLAGS-ffplay=$sdl2_cflags | |
ZLIB=$($ldflags_filter -lz) | |
LIB_INSTALL_EXTRA_CMD=$LIB_INSTALL_EXTRA_CMD | |
EXTRALIBS=$extralibs | |
diff --git a/ffplay.c b/ffplay.c | |
index d5fcde8..451ad50 100644 | |
--- a/ffplay.c | |
+++ b/ffplay.c | |
@@ -105,6 +105,8 @@ const int program_birth_year = 2003; | |
#define CURSOR_HIDE_DELAY 1000000 | |
+#define USE_ONEPASS_SUBTITLE_RENDER 1 | |
+ | |
static unsigned sws_flags = SWS_BICUBIC; | |
typedef struct MyAVPacketList { | |
@@ -151,17 +153,17 @@ typedef struct Clock { | |
typedef struct Frame { | |
AVFrame *frame; | |
AVSubtitle sub; | |
- AVSubtitleRect **subrects; /* rescaled subtitle rectangles in yuva */ | |
int serial; | |
double pts; /* presentation timestamp for the frame */ | |
double duration; /* estimated duration of the frame */ | |
int64_t pos; /* byte position of the frame in the input file */ | |
- SDL_Overlay *bmp; | |
+ SDL_Texture *bmp; | |
int allocated; | |
- int reallocate; | |
int width; | |
int height; | |
+ int format; | |
AVRational sar; | |
+ int uploaded; | |
} Frame; | |
typedef struct FrameQueue { | |
@@ -271,6 +273,8 @@ typedef struct VideoState { | |
FFTSample *rdft_data; | |
int xpos; | |
double last_vis_time; | |
+ SDL_Texture *vis_texture; | |
+ SDL_Texture *sub_texture; | |
int subtitle_stream; | |
AVStream *subtitle_st; | |
@@ -283,11 +287,8 @@ typedef struct VideoState { | |
AVStream *video_st; | |
PacketQueue videoq; | |
double max_frame_duration; // maximum duration of a frame - above this, we consider the jump a timestamp discontinuity | |
-#if !CONFIG_AVFILTER | |
struct SwsContext *img_convert_ctx; | |
-#endif | |
struct SwsContext *sub_convert_ctx; | |
- SDL_Rect last_display_rect; | |
int eof; | |
char *filename; | |
@@ -312,8 +313,6 @@ typedef struct VideoState { | |
static AVInputFormat *file_iformat; | |
static const char *input_filename; | |
static const char *window_title; | |
-static int fs_screen_width; | |
-static int fs_screen_height; | |
static int default_width = 640; | |
static int default_height = 480; | |
static int screen_width = 0; | |
@@ -361,7 +360,8 @@ static AVPacket flush_pkt; | |
#define FF_ALLOC_EVENT (SDL_USEREVENT) | |
#define FF_QUIT_EVENT (SDL_USEREVENT + 2) | |
-static SDL_Surface *screen; | |
+static SDL_Window *window; | |
+static SDL_Renderer *renderer; | |
#if CONFIG_AVFILTER | |
static int opt_add_vfilter(void *optctx, const char *opt, const char *arg) | |
@@ -646,12 +646,6 @@ static void decoder_destroy(Decoder *d) { | |
static void frame_queue_unref_item(Frame *vp) | |
{ | |
- int i; | |
- for (i = 0; i < vp->sub.num_rects; i++) { | |
- av_freep(&vp->subrects[i]->data[0]); | |
- av_freep(&vp->subrects[i]); | |
- } | |
- av_freep(&vp->subrects); | |
av_frame_unref(vp->frame); | |
avsubtitle_free(&vp->sub); | |
} | |
@@ -802,113 +796,47 @@ static void decoder_abort(Decoder *d, FrameQueue *fq) | |
packet_queue_flush(d->queue); | |
} | |
-static inline void fill_rectangle(SDL_Surface *screen, | |
- int x, int y, int w, int h, int color, int update) | |
+static inline void fill_rectangle(int x, int y, int w, int h) | |
{ | |
SDL_Rect rect; | |
rect.x = x; | |
rect.y = y; | |
rect.w = w; | |
rect.h = h; | |
- SDL_FillRect(screen, &rect, color); | |
- if (update && w > 0 && h > 0) | |
- SDL_UpdateRect(screen, x, y, w, h); | |
-} | |
- | |
-/* draw only the border of a rectangle */ | |
-static void fill_border(int xleft, int ytop, int width, int height, int x, int y, int w, int h, int color, int update) | |
-{ | |
- int w1, w2, h1, h2; | |
- | |
- /* fill the background */ | |
- w1 = x; | |
- if (w1 < 0) | |
- w1 = 0; | |
- w2 = width - (x + w); | |
- if (w2 < 0) | |
- w2 = 0; | |
- h1 = y; | |
- if (h1 < 0) | |
- h1 = 0; | |
- h2 = height - (y + h); | |
- if (h2 < 0) | |
- h2 = 0; | |
- fill_rectangle(screen, | |
- xleft, ytop, | |
- w1, height, | |
- color, update); | |
- fill_rectangle(screen, | |
- xleft + width - w2, ytop, | |
- w2, height, | |
- color, update); | |
- fill_rectangle(screen, | |
- xleft + w1, ytop, | |
- width - w1 - w2, h1, | |
- color, update); | |
- fill_rectangle(screen, | |
- xleft + w1, ytop + height - h2, | |
- width - w1 - w2, h2, | |
- color, update); | |
-} | |
- | |
-#define ALPHA_BLEND(a, oldp, newp, s)\ | |
-((((oldp << s) * (255 - (a))) + (newp * (a))) / (255 << s)) | |
- | |
- | |
- | |
-#define BPP 1 | |
- | |
-static void blend_subrect(uint8_t **data, int *linesize, const AVSubtitleRect *rect, int imgw, int imgh) | |
-{ | |
- int x, y, Y, U, V, A; | |
- uint8_t *lum, *cb, *cr; | |
- int dstx, dsty, dstw, dsth; | |
- const AVSubtitleRect *src = rect; | |
- | |
- dstw = av_clip(rect->w, 0, imgw); | |
- dsth = av_clip(rect->h, 0, imgh); | |
- dstx = av_clip(rect->x, 0, imgw - dstw); | |
- dsty = av_clip(rect->y, 0, imgh - dsth); | |
- lum = data[0] + dstx + dsty * linesize[0]; | |
- cb = data[1] + dstx/2 + (dsty >> 1) * linesize[1]; | |
- cr = data[2] + dstx/2 + (dsty >> 1) * linesize[2]; | |
- | |
- for (y = 0; y<dsth; y++) { | |
- for (x = 0; x<dstw; x++) { | |
- Y = src->data[0][x + y*src->linesize[0]]; | |
- A = src->data[3][x + y*src->linesize[3]]; | |
- lum[0] = ALPHA_BLEND(A, lum[0], Y, 0); | |
- lum++; | |
- } | |
- lum += linesize[0] - dstw; | |
- } | |
- | |
- for (y = 0; y<dsth/2; y++) { | |
- for (x = 0; x<dstw/2; x++) { | |
- U = src->data[1][x + y*src->linesize[1]]; | |
- V = src->data[2][x + y*src->linesize[2]]; | |
- A = src->data[3][2*x + 2*y *src->linesize[3]] | |
- + src->data[3][2*x + 1 + 2*y *src->linesize[3]] | |
- + src->data[3][2*x + 1 + (2*y+1)*src->linesize[3]] | |
- + src->data[3][2*x + (2*y+1)*src->linesize[3]]; | |
- cb[0] = ALPHA_BLEND(A>>2, cb[0], U, 0); | |
- cr[0] = ALPHA_BLEND(A>>2, cr[0], V, 0); | |
- cb++; | |
- cr++; | |
- } | |
- cb += linesize[1] - dstw/2; | |
- cr += linesize[2] - dstw/2; | |
- } | |
+ if (w && h) | |
+ SDL_RenderFillRect(renderer, &rect); | |
} | |
static void free_picture(Frame *vp) | |
{ | |
if (vp->bmp) { | |
- SDL_FreeYUVOverlay(vp->bmp); | |
+ SDL_DestroyTexture(vp->bmp); | |
vp->bmp = NULL; | |
} | |
} | |
+static int realloc_texture(SDL_Texture **texture, Uint32 new_format, int new_width, int new_height, SDL_BlendMode blendmode, int init_texture) | |
+{ | |
+ Uint32 format; | |
+ int access, w, h; | |
+ if (SDL_QueryTexture(*texture, &format, &access, &w, &h) < 0 || new_width != w || new_height != h || new_format != format) { | |
+ void *pixels; | |
+ int pitch; | |
+ SDL_DestroyTexture(*texture); | |
+ if (!(*texture = SDL_CreateTexture(renderer, new_format, SDL_TEXTUREACCESS_STREAMING, new_width, new_height))) | |
+ return -1; | |
+ if (SDL_SetTextureBlendMode(*texture, blendmode) < 0) | |
+ return -1; | |
+ if (init_texture) { | |
+ if (SDL_LockTexture(*texture, NULL, &pixels, &pitch) < 0) | |
+ return -1; | |
+ memset(pixels, 0, pitch * new_height); | |
+ SDL_UnlockTexture(*texture); | |
+ } | |
+ } | |
+ return 0; | |
+} | |
+ | |
static void calculate_display_rect(SDL_Rect *rect, | |
int scr_xleft, int scr_ytop, int scr_width, int scr_height, | |
int pic_width, int pic_height, AVRational pic_sar) | |
@@ -940,12 +868,44 @@ static void calculate_display_rect(SDL_Rect *rect, | |
rect->h = FFMAX(height, 1); | |
} | |
+static int upload_texture(SDL_Texture *tex, AVFrame *frame, struct SwsContext **img_convert_ctx) { | |
+ int ret = 0; | |
+ switch (frame->format) { | |
+ case AV_PIX_FMT_YUV420P: | |
+ ret = SDL_UpdateYUVTexture(tex, NULL, frame->data[0], frame->linesize[0], | |
+ frame->data[1], frame->linesize[1], | |
+ frame->data[2], frame->linesize[2]); | |
+ break; | |
+ case AV_PIX_FMT_BGRA: | |
+ ret = SDL_UpdateTexture(tex, NULL, frame->data[0], frame->linesize[0]); | |
+ break; | |
+ default: | |
+ /* This should only happen if we are not using avfilter... */ | |
+ *img_convert_ctx = sws_getCachedContext(*img_convert_ctx, | |
+ frame->width, frame->height, frame->format, frame->width, frame->height, | |
+ AV_PIX_FMT_BGRA, sws_flags, NULL, NULL, NULL); | |
+ if (*img_convert_ctx != NULL) { | |
+ uint8_t *pixels; | |
+ int pitch; | |
+ if (!SDL_LockTexture(tex, NULL, (void **)&pixels, &pitch)) { | |
+ sws_scale(*img_convert_ctx, (const uint8_t * const *)frame->data, frame->linesize, | |
+ 0, frame->height, &pixels, &pitch); | |
+ SDL_UnlockTexture(tex); | |
+ } | |
+ } else { | |
+ av_log(NULL, AV_LOG_FATAL, "Cannot initialize the conversion context\n"); | |
+ ret = -1; | |
+ } | |
+ break; | |
+ } | |
+ return ret; | |
+} | |
+ | |
static void video_image_display(VideoState *is) | |
{ | |
Frame *vp; | |
- Frame *sp; | |
+ Frame *sp = NULL; | |
SDL_Rect rect; | |
- int i; | |
vp = frame_queue_peek(&is->pictq); | |
if (vp->bmp) { | |
@@ -954,36 +914,71 @@ static void video_image_display(VideoState *is) | |
sp = frame_queue_peek(&is->subpq); | |
if (vp->pts >= sp->pts + ((float) sp->sub.start_display_time / 1000)) { | |
- uint8_t *data[4]; | |
- int linesize[4]; | |
- | |
- SDL_LockYUVOverlay (vp->bmp); | |
- | |
- data[0] = vp->bmp->pixels[0]; | |
- data[1] = vp->bmp->pixels[2]; | |
- data[2] = vp->bmp->pixels[1]; | |
- | |
- linesize[0] = vp->bmp->pitches[0]; | |
- linesize[1] = vp->bmp->pitches[2]; | |
- linesize[2] = vp->bmp->pitches[1]; | |
- | |
- for (i = 0; i < sp->sub.num_rects; i++) | |
- blend_subrect(data, linesize, sp->subrects[i], | |
- vp->bmp->w, vp->bmp->h); | |
- | |
- SDL_UnlockYUVOverlay (vp->bmp); | |
- } | |
+ if (!sp->uploaded) { | |
+ uint8_t *pixels; | |
+ int pitch; | |
+ int i; | |
+ if (!sp->width || !sp->height) { | |
+ sp->width = vp->width; | |
+ sp->height = vp->height; | |
+ } | |
+ if (realloc_texture(&is->sub_texture, SDL_PIXELFORMAT_ARGB8888, sp->width, sp->height, SDL_BLENDMODE_BLEND, 1) < 0) | |
+ return; | |
+ | |
+ for (i = 0; i < sp->sub.num_rects; i++) { | |
+ AVSubtitleRect *sub_rect = sp->sub.rects[i]; | |
+ | |
+ sub_rect->x = av_clip(sub_rect->x, 0, sp->width ); | |
+ sub_rect->y = av_clip(sub_rect->y, 0, sp->height); | |
+ sub_rect->w = av_clip(sub_rect->w, 0, sp->width - sub_rect->x); | |
+ sub_rect->h = av_clip(sub_rect->h, 0, sp->height - sub_rect->y); | |
+ | |
+ is->sub_convert_ctx = sws_getCachedContext(is->sub_convert_ctx, | |
+ sub_rect->w, sub_rect->h, AV_PIX_FMT_PAL8, | |
+ sub_rect->w, sub_rect->h, AV_PIX_FMT_BGRA, | |
+ 0, NULL, NULL, NULL); | |
+ if (!is->sub_convert_ctx) { | |
+ av_log(NULL, AV_LOG_FATAL, "Cannot initialize the conversion context\n"); | |
+ return; | |
+ } | |
+ if (!SDL_LockTexture(is->sub_texture, (SDL_Rect *)sub_rect, (void **)&pixels, &pitch)) { | |
+ sws_scale(is->sub_convert_ctx, (const uint8_t * const *)sub_rect->data, sub_rect->linesize, | |
+ 0, sub_rect->h, &pixels, &pitch); | |
+ SDL_UnlockTexture(is->sub_texture); | |
+ } | |
+ } | |
+ sp->uploaded = 1; | |
+ } | |
+ } else | |
+ sp = NULL; | |
} | |
} | |
calculate_display_rect(&rect, is->xleft, is->ytop, is->width, is->height, vp->width, vp->height, vp->sar); | |
- SDL_DisplayYUVOverlay(vp->bmp, &rect); | |
+ if (!vp->uploaded) { | |
+ if (upload_texture(vp->bmp, vp->frame, &is->img_convert_ctx) < 0) | |
+ return; | |
+ vp->uploaded = 1; | |
+ } | |
- if (rect.x != is->last_display_rect.x || rect.y != is->last_display_rect.y || rect.w != is->last_display_rect.w || rect.h != is->last_display_rect.h || is->force_refresh) { | |
- int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); | |
- fill_border(is->xleft, is->ytop, is->width, is->height, rect.x, rect.y, rect.w, rect.h, bgcolor, 1); | |
- is->last_display_rect = rect; | |
+ SDL_RenderCopy(renderer, vp->bmp, NULL, &rect); | |
+ if (sp) { | |
+#if USE_ONEPASS_SUBTITLE_RENDER | |
+ SDL_RenderCopy(renderer, is->sub_texture, NULL, &rect); | |
+#else | |
+ int i; | |
+ double xratio = (double)rect.w / (double)sp->width; | |
+ double yratio = (double)rect.h / (double)sp->height; | |
+ for (i = 0; i < sp->sub.num_rects; i++) { | |
+ SDL_Rect *sub_rect = (SDL_Rect*)sp->sub.rects[i]; | |
+ SDL_Rect target = {.x = rect.x + sub_rect->x * xratio, | |
+ .y = rect.y + sub_rect->y * yratio, | |
+ .w = sub_rect->w * xratio, | |
+ .h = sub_rect->h * yratio}; | |
+ SDL_RenderCopy(renderer, is->sub_texture, sub_rect, &target); | |
+ } | |
+#endif | |
} | |
} | |
} | |
@@ -996,7 +991,7 @@ static inline int compute_mod(int a, int b) | |
static void video_audio_display(VideoState *s) | |
{ | |
int i, i_start, x, y1, y, ys, delay, n, nb_display_channels; | |
- int ch, channels, h, h2, bgcolor, fgcolor; | |
+ int ch, channels, h, h2; | |
int64_t time_diff; | |
int rdft_bits, nb_freq; | |
@@ -1046,13 +1041,8 @@ static void video_audio_display(VideoState *s) | |
i_start = s->last_i_start; | |
} | |
- bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); | |
if (s->show_mode == SHOW_MODE_WAVES) { | |
- fill_rectangle(screen, | |
- s->xleft, s->ytop, s->width, s->height, | |
- bgcolor, 0); | |
- | |
- fgcolor = SDL_MapRGB(screen->format, 0xff, 0xff, 0xff); | |
+ SDL_SetRenderDrawColor(renderer, 255, 255, 255, 255); | |
/* total height for one channel */ | |
h = s->height / nb_display_channels; | |
@@ -1069,25 +1059,23 @@ static void video_audio_display(VideoState *s) | |
} else { | |
ys = y1; | |
} | |
- fill_rectangle(screen, | |
- s->xleft + x, ys, 1, y, | |
- fgcolor, 0); | |
+ fill_rectangle(s->xleft + x, ys, 1, y); | |
i += channels; | |
if (i >= SAMPLE_ARRAY_SIZE) | |
i -= SAMPLE_ARRAY_SIZE; | |
} | |
} | |
- fgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0xff); | |
+ SDL_SetRenderDrawColor(renderer, 0, 0, 255, 255); | |
for (ch = 1; ch < nb_display_channels; ch++) { | |
y = s->ytop + ch * h; | |
- fill_rectangle(screen, | |
- s->xleft, y, s->width, 1, | |
- fgcolor, 0); | |
+ fill_rectangle(s->xleft, y, s->width, 1); | |
} | |
- SDL_UpdateRect(screen, s->xleft, s->ytop, s->width, s->height); | |
} else { | |
+ if (realloc_texture(&s->vis_texture, SDL_PIXELFORMAT_ARGB8888, s->width, s->height, SDL_BLENDMODE_NONE, 1) < 0) | |
+ return; | |
+ | |
nb_display_channels= FFMIN(nb_display_channels, 2); | |
if (rdft_bits != s->rdft_bits) { | |
av_rdft_end(s->rdft); | |
@@ -1101,6 +1089,9 @@ static void video_audio_display(VideoState *s) | |
s->show_mode = SHOW_MODE_WAVES; | |
} else { | |
FFTSample *data[2]; | |
+ SDL_Rect rect = {.x = s->xpos, .y = 0, .w = 1, .h = s->height}; | |
+ uint32_t *pixels; | |
+ int pitch; | |
for (ch = 0; ch < nb_display_channels; ch++) { | |
data[ch] = s->rdft_data + 2 * nb_freq * ch; | |
i = i_start + ch; | |
@@ -1115,21 +1106,23 @@ static void video_audio_display(VideoState *s) | |
} | |
/* Least efficient way to do this, we should of course | |
* directly access it but it is more than fast enough. */ | |
- for (y = 0; y < s->height; y++) { | |
- double w = 1 / sqrt(nb_freq); | |
- int a = sqrt(w * hypot(data[0][2 * y + 0], data[0][2 * y + 1])); | |
- int b = (nb_display_channels == 2 ) ? sqrt(w * hypot(data[1][2 * y + 0], data[1][2 * y + 1])) | |
- : a; | |
- a = FFMIN(a, 255); | |
- b = FFMIN(b, 255); | |
- fgcolor = SDL_MapRGB(screen->format, a, b, (a + b) / 2); | |
- | |
- fill_rectangle(screen, | |
- s->xpos, s->height-y, 1, 1, | |
- fgcolor, 0); | |
+ if (!SDL_LockTexture(s->vis_texture, &rect, (void **)&pixels, &pitch)) { | |
+ pitch >>= 2; | |
+ pixels += pitch * s->height; | |
+ for (y = 0; y < s->height; y++) { | |
+ double w = 1 / sqrt(nb_freq); | |
+ int a = sqrt(w * hypot(data[0][2 * y + 0], data[0][2 * y + 1])); | |
+ int b = (nb_display_channels == 2 ) ? sqrt(w * hypot(data[1][2 * y + 0], data[1][2 * y + 1])) | |
+ : a; | |
+ a = FFMIN(a, 255); | |
+ b = FFMIN(b, 255); | |
+ pixels -= pitch; | |
+ *pixels = (a << 16) + (b << 8) + ((a+b) >> 1); | |
+ } | |
+ SDL_UnlockTexture(s->vis_texture); | |
} | |
+ SDL_RenderCopy(renderer, s->vis_texture, NULL, NULL); | |
} | |
- SDL_UpdateRect(screen, s->xpos, s->ytop, 1, s->height); | |
if (!s->paused) | |
s->xpos++; | |
if (s->xpos >= s->width) | |
@@ -1219,11 +1212,13 @@ static void stream_close(VideoState *is) | |
frame_queue_destory(&is->sampq); | |
frame_queue_destory(&is->subpq); | |
SDL_DestroyCond(is->continue_read_thread); | |
-#if !CONFIG_AVFILTER | |
sws_freeContext(is->img_convert_ctx); | |
-#endif | |
sws_freeContext(is->sub_convert_ctx); | |
av_free(is->filename); | |
+ if (is->vis_texture) | |
+ SDL_DestroyTexture(is->vis_texture); | |
+ if (is->sub_texture) | |
+ SDL_DestroyTexture(is->sub_texture); | |
av_free(is); | |
} | |
@@ -1232,6 +1227,10 @@ static void do_exit(VideoState *is) | |
if (is) { | |
stream_close(is); | |
} | |
+ if (renderer) | |
+ SDL_DestroyRenderer(renderer); | |
+ if (window) | |
+ SDL_DestroyWindow(window); | |
av_lockmgr_register(NULL); | |
uninit_opts(); | |
#if CONFIG_AVFILTER | |
@@ -1258,42 +1257,48 @@ static void set_default_window_size(int width, int height, AVRational sar) | |
default_height = rect.h; | |
} | |
-static int video_open(VideoState *is, int force_set_video_mode, Frame *vp) | |
+static int video_open(VideoState *is, Frame *vp) | |
{ | |
- int flags = SDL_HWSURFACE | SDL_ASYNCBLIT | SDL_HWACCEL; | |
int w,h; | |
- if (is_full_screen) flags |= SDL_FULLSCREEN; | |
- else flags |= SDL_RESIZABLE; | |
- | |
if (vp && vp->width) | |
set_default_window_size(vp->width, vp->height, vp->sar); | |
- if (is_full_screen && fs_screen_width) { | |
- w = fs_screen_width; | |
- h = fs_screen_height; | |
- } else if (!is_full_screen && screen_width) { | |
+ if (screen_width) { | |
w = screen_width; | |
h = screen_height; | |
} else { | |
w = default_width; | |
h = default_height; | |
} | |
- w = FFMIN(16383, w); | |
- if (screen && is->width == screen->w && screen->w == w | |
- && is->height== screen->h && screen->h == h && !force_set_video_mode) | |
- return 0; | |
- screen = SDL_SetVideoMode(w, h, 0, flags); | |
- if (!screen) { | |
+ | |
+ if (!window) { | |
+ int flags = SDL_WINDOW_SHOWN | SDL_WINDOW_RESIZABLE; | |
+ if (!window_title) | |
+ window_title = input_filename; | |
+ if (is_full_screen) | |
+ flags |= SDL_WINDOW_FULLSCREEN_DESKTOP; | |
+ window = SDL_CreateWindow(window_title, SDL_WINDOWPOS_UNDEFINED, SDL_WINDOWPOS_UNDEFINED, w, h, flags); | |
+ SDL_SetHint(SDL_HINT_RENDER_SCALE_QUALITY, "linear"); | |
+ if (window) { | |
+ SDL_RendererInfo info; | |
+ renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED | SDL_RENDERER_PRESENTVSYNC); | |
+ if (renderer) { | |
+ if (!SDL_GetRendererInfo(renderer, &info)) | |
+ av_log(NULL, AV_LOG_VERBOSE, "Initialized %s renderer.\n", info.name); | |
+ } | |
+ } | |
+ } else { | |
+ SDL_SetWindowSize(window, w, h); | |
+ } | |
+ | |
+ if (!window || !renderer) { | |
av_log(NULL, AV_LOG_FATAL, "SDL: could not set video mode - exiting\n"); | |
do_exit(is); | |
} | |
- if (!window_title) | |
- window_title = input_filename; | |
- SDL_WM_SetCaption(window_title, window_title); | |
- is->width = screen->w; | |
- is->height = screen->h; | |
+ is->width = w; | |
+ is->height = h; | |
return 0; | |
} | |
@@ -1301,12 +1306,16 @@ static int video_open(VideoState *is, int force_set_video_mode, Frame *vp) | |
/* display the current picture, if any */ | |
static void video_display(VideoState *is) | |
{ | |
- if (!screen) | |
- video_open(is, 0, NULL); | |
+ if (!window) | |
+ video_open(is, NULL); | |
+ | |
+ SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255); | |
+ SDL_RenderClear(renderer); | |
if (is->audio_st && is->show_mode != SHOW_MODE_VIDEO) | |
video_audio_display(is); | |
else if (is->video_st) | |
video_image_display(is); | |
+ SDL_RenderPresent(renderer); | |
} | |
static double get_clock(Clock *c) | |
@@ -1600,6 +1609,20 @@ retry: | |
|| (is->vidclk.pts > (sp->pts + ((float) sp->sub.end_display_time / 1000))) | |
|| (sp2 && is->vidclk.pts > (sp2->pts + ((float) sp2->sub.start_display_time / 1000)))) | |
{ | |
+ if (sp->uploaded) { | |
+ int i; | |
+ for (i = 0; i < sp->sub.num_rects; i++) { | |
+ AVSubtitleRect *sub_rect = sp->sub.rects[i]; | |
+ uint8_t *pixels; | |
+ int pitch, j; | |
+ | |
+ if (!SDL_LockTexture(is->sub_texture, (SDL_Rect *)sub_rect, (void **)&pixels, &pitch)) { | |
+ for (j = 0; j < sub_rect->h; j++, pixels += pitch) | |
+ memset(pixels, 0, sub_rect->w << 2); | |
+ SDL_UnlockTexture(is->sub_texture); | |
+ } | |
+ } | |
+ } | |
frame_queue_next(&is->subpq); | |
} else { | |
break; | |
@@ -1665,19 +1688,18 @@ display: | |
static void alloc_picture(VideoState *is) | |
{ | |
Frame *vp; | |
- int64_t bufferdiff; | |
+ int sdl_format; | |
vp = &is->pictq.queue[is->pictq.windex]; | |
- free_picture(vp); | |
+ video_open(is, vp); | |
- video_open(is, 0, vp); | |
+ if (vp->format == AV_PIX_FMT_YUV420P) | |
+ sdl_format = SDL_PIXELFORMAT_YV12; | |
+ else | |
+ sdl_format = SDL_PIXELFORMAT_ARGB8888; | |
- vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height, | |
- SDL_YV12_OVERLAY, | |
- screen); | |
- bufferdiff = vp->bmp ? FFMAX(vp->bmp->pixels[0], vp->bmp->pixels[1]) - FFMIN(vp->bmp->pixels[0], vp->bmp->pixels[1]) : 0; | |
- if (!vp->bmp || vp->bmp->pitches[0] < vp->width || bufferdiff < (int64_t)vp->height * vp->bmp->pitches[0]) { | |
+ if (realloc_texture(&vp->bmp, sdl_format, vp->width, vp->height, SDL_BLENDMODE_NONE, 0) < 0) { | |
/* SDL allocates a buffer smaller than requested if the video | |
* overlay hardware is unable to support the requested size. */ | |
av_log(NULL, AV_LOG_FATAL, | |
@@ -1693,24 +1715,6 @@ static void alloc_picture(VideoState *is) | |
SDL_UnlockMutex(is->pictq.mutex); | |
} | |
-static void duplicate_right_border_pixels(SDL_Overlay *bmp) { | |
- int i, width, height; | |
- Uint8 *p, *maxp; | |
- for (i = 0; i < 3; i++) { | |
- width = bmp->w; | |
- height = bmp->h; | |
- if (i > 0) { | |
- width >>= 1; | |
- height >>= 1; | |
- } | |
- if (bmp->pitches[i] > width) { | |
- maxp = bmp->pixels[i] + bmp->pitches[i] * height - 1; | |
- for (p = bmp->pixels[i] + width - 1; p < maxp; p += bmp->pitches[i]) | |
- *(p+1) = *p; | |
- } | |
- } | |
-} | |
- | |
static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, double duration, int64_t pos, int serial) | |
{ | |
Frame *vp; | |
@@ -1724,17 +1728,19 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, double | |
return -1; | |
vp->sar = src_frame->sample_aspect_ratio; | |
+ vp->uploaded = 0; | |
/* alloc or resize hardware picture buffer */ | |
- if (!vp->bmp || vp->reallocate || !vp->allocated || | |
+ if (!vp->bmp || !vp->allocated || | |
vp->width != src_frame->width || | |
- vp->height != src_frame->height) { | |
+ vp->height != src_frame->height || | |
+ vp->format != src_frame->format) { | |
SDL_Event event; | |
- vp->allocated = 0; | |
- vp->reallocate = 0; | |
+ vp->allocated = 0; | |
vp->width = src_frame->width; | |
vp->height = src_frame->height; | |
+ vp->format = src_frame->format; | |
/* the allocation must be done in the main thread to avoid | |
locking problems. */ | |
@@ -1748,7 +1754,7 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, double | |
SDL_CondWait(is->pictq.cond, is->pictq.mutex); | |
} | |
/* if the queue is aborted, we have to pop the pending ALLOC event or wait for the allocation to complete */ | |
- if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, SDL_EVENTMASK(FF_ALLOC_EVENT)) != 1) { | |
+ if (is->videoq.abort_request && SDL_PeepEvents(&event, 1, SDL_GETEVENT, FF_ALLOC_EVENT, FF_ALLOC_EVENT) != 1) { | |
while (!vp->allocated && !is->abort_request) { | |
SDL_CondWait(is->pictq.cond, is->pictq.mutex); | |
} | |
@@ -1761,58 +1767,12 @@ static int queue_picture(VideoState *is, AVFrame *src_frame, double pts, double | |
/* if the frame is not skipped, then display it */ | |
if (vp->bmp) { | |
- uint8_t *data[4]; | |
- int linesize[4]; | |
- | |
- /* get a pointer on the bitmap */ | |
- SDL_LockYUVOverlay (vp->bmp); | |
- | |
- data[0] = vp->bmp->pixels[0]; | |
- data[1] = vp->bmp->pixels[2]; | |
- data[2] = vp->bmp->pixels[1]; | |
- | |
- linesize[0] = vp->bmp->pitches[0]; | |
- linesize[1] = vp->bmp->pitches[2]; | |
- linesize[2] = vp->bmp->pitches[1]; | |
- | |
-#if CONFIG_AVFILTER | |
- // FIXME use direct rendering | |
- av_image_copy(data, linesize, (const uint8_t **)src_frame->data, src_frame->linesize, | |
- src_frame->format, vp->width, vp->height); | |
-#else | |
- { | |
- AVDictionaryEntry *e = av_dict_get(sws_dict, "sws_flags", NULL, 0); | |
- if (e) { | |
- const AVClass *class = sws_get_class(); | |
- const AVOption *o = av_opt_find(&class, "sws_flags", NULL, 0, | |
- AV_OPT_SEARCH_FAKE_OBJ); | |
- int ret = av_opt_eval_flags(&class, o, e->value, &sws_flags); | |
- if (ret < 0) | |
- exit(1); | |
- } | |
- } | |
- | |
- is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx, | |
- vp->width, vp->height, src_frame->format, vp->width, vp->height, | |
- AV_PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL); | |
- if (!is->img_convert_ctx) { | |
- av_log(NULL, AV_LOG_FATAL, "Cannot initialize the conversion context\n"); | |
- exit(1); | |
- } | |
- sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize, | |
- 0, vp->height, data, linesize); | |
-#endif | |
- /* workaround SDL PITCH_WORKAROUND */ | |
- duplicate_right_border_pixels(vp->bmp); | |
- /* update the bitmap content */ | |
- SDL_UnlockYUVOverlay(vp->bmp); | |
- | |
vp->pts = pts; | |
vp->duration = duration; | |
vp->pos = pos; | |
vp->serial = serial; | |
- /* now we can update the picture count */ | |
+ av_frame_move_ref(vp->frame, src_frame); | |
frame_queue_push(&is->pictq); | |
} | |
return 0; | |
@@ -1900,7 +1860,7 @@ fail: | |
static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const char *vfilters, AVFrame *frame) | |
{ | |
- static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_NONE }; | |
+ static const enum AVPixelFormat pix_fmts[] = { AV_PIX_FMT_YUV420P, AV_PIX_FMT_BGRA, AV_PIX_FMT_NONE }; | |
char sws_flags_str[512] = ""; | |
char buffersrc_args[256]; | |
int ret; | |
@@ -1963,10 +1923,6 @@ static int configure_video_filters(AVFilterGraph *graph, VideoState *is, const c | |
last_filter = filt_ctx; \ | |
} while (0) | |
- /* SDL YUV code is not handling odd width/height for some driver | |
- * combinations, therefore we crop the picture to an even width/height. */ | |
- INSERT_FILT("crop", "floor(in_w/2)*2:floor(in_h/2)*2"); | |
- | |
if (autorotate) { | |
double theta = get_rotation(is->video_st); | |
@@ -2161,7 +2117,7 @@ static int audio_thread(void *arg) | |
static int decoder_start(Decoder *d, int (*fn)(void *), void *arg) | |
{ | |
packet_queue_start(d->queue); | |
- d->decoder_tid = SDL_CreateThread(fn, arg); | |
+ d->decoder_tid = SDL_CreateThread(fn, "decoder", arg); | |
if (!d->decoder_tid) { | |
av_log(NULL, AV_LOG_ERROR, "SDL_CreateThread(): %s\n", SDL_GetError()); | |
return AVERROR(ENOMEM); | |
@@ -2281,10 +2237,10 @@ static int video_thread(void *arg) | |
static int subtitle_thread(void *arg) | |
{ | |
VideoState *is = arg; | |
+ AVCodecContext *dec = is->subtitle_st->codec; | |
Frame *sp; | |
int got_subtitle; | |
double pts; | |
- int i; | |
for (;;) { | |
if (!(sp = frame_queue_peek_writable(&is->subpq))) | |
@@ -2300,42 +2256,9 @@ static int subtitle_thread(void *arg) | |
pts = sp->sub.pts / (double)AV_TIME_BASE; | |
sp->pts = pts; | |
sp->serial = is->subdec.pkt_serial; | |
- if (!(sp->subrects = av_mallocz_array(sp->sub.num_rects, sizeof(AVSubtitleRect*)))) { | |
- av_log(NULL, AV_LOG_FATAL, "Cannot allocate subrects\n"); | |
- exit(1); | |
- } | |
- | |
- for (i = 0; i < sp->sub.num_rects; i++) | |
- { | |
- int in_w = sp->sub.rects[i]->w; | |
- int in_h = sp->sub.rects[i]->h; | |
- int subw = is->subdec.avctx->width ? is->subdec.avctx->width : is->viddec_width; | |
- int subh = is->subdec.avctx->height ? is->subdec.avctx->height : is->viddec_height; | |
- int out_w = is->viddec_width ? in_w * is->viddec_width / subw : in_w; | |
- int out_h = is->viddec_height ? in_h * is->viddec_height / subh : in_h; | |
- | |
- if (!(sp->subrects[i] = av_mallocz(sizeof(AVSubtitleRect))) || | |
- av_image_alloc(sp->subrects[i]->data, sp->subrects[i]->linesize, out_w, out_h, AV_PIX_FMT_YUVA420P, 16) < 0) { | |
- av_log(NULL, AV_LOG_FATAL, "Cannot allocate subtitle data\n"); | |
- exit(1); | |
- } | |
- | |
- is->sub_convert_ctx = sws_getCachedContext(is->sub_convert_ctx, | |
- in_w, in_h, AV_PIX_FMT_PAL8, out_w, out_h, | |
- AV_PIX_FMT_YUVA420P, sws_flags, NULL, NULL, NULL); | |
- if (!is->sub_convert_ctx) { | |
- av_log(NULL, AV_LOG_FATAL, "Cannot initialize the sub conversion context\n"); | |
- exit(1); | |
- } | |
- sws_scale(is->sub_convert_ctx, | |
- (void*)sp->sub.rects[i]->data, sp->sub.rects[i]->linesize, | |
- 0, in_h, sp->subrects[i]->data, sp->subrects[i]->linesize); | |
- | |
- sp->subrects[i]->w = out_w; | |
- sp->subrects[i]->h = out_h; | |
- sp->subrects[i]->x = sp->sub.rects[i]->x * out_w / in_w; | |
- sp->subrects[i]->y = sp->sub.rects[i]->y * out_h / in_h; | |
- } | |
+ sp->width = dec->width; | |
+ sp->height = dec->height; | |
+ sp->uploaded = 0; | |
/* now we can update the picture count */ | |
frame_queue_push(&is->subpq); | |
@@ -3192,7 +3115,7 @@ static VideoState *stream_open(const char *filename, AVInputFormat *iformat) | |
is->audio_volume = SDL_MIX_MAXVOLUME; | |
is->muted = 0; | |
is->av_sync_type = av_sync_type; | |
- is->read_tid = SDL_CreateThread(read_thread, is); | |
+ is->read_tid = SDL_CreateThread(read_thread, "read_thread", is); | |
if (!is->read_tid) { | |
av_log(NULL, AV_LOG_FATAL, "SDL_CreateThread(): %s\n", SDL_GetError()); | |
fail: | |
@@ -3283,27 +3206,17 @@ static void stream_cycle_channel(VideoState *is, int codec_type) | |
static void toggle_full_screen(VideoState *is) | |
{ | |
-#if defined(__APPLE__) && SDL_VERSION_ATLEAST(1, 2, 14) | |
- /* OS X needs to reallocate the SDL overlays */ | |
- int i; | |
- for (i = 0; i < VIDEO_PICTURE_QUEUE_SIZE; i++) | |
- is->pictq.queue[i].reallocate = 1; | |
-#endif | |
is_full_screen = !is_full_screen; | |
- video_open(is, 1, NULL); | |
+ SDL_SetWindowFullscreen(window, is_full_screen ? SDL_WINDOW_FULLSCREEN_DESKTOP : 0); | |
} | |
static void toggle_audio_display(VideoState *is) | |
{ | |
- int bgcolor = SDL_MapRGB(screen->format, 0x00, 0x00, 0x00); | |
int next = is->show_mode; | |
do { | |
next = (next + 1) % SHOW_MODE_NB; | |
} while (next != is->show_mode && (next == SHOW_MODE_VIDEO && !is->video_st || next != SHOW_MODE_VIDEO && !is->audio_st)); | |
if (is->show_mode != next) { | |
- fill_rectangle(screen, | |
- is->xleft, is->ytop, is->width, is->height, | |
- bgcolor, 1); | |
is->force_refresh = 1; | |
is->show_mode = next; | |
} | |
@@ -3312,7 +3225,7 @@ static void toggle_audio_display(VideoState *is) | |
static void refresh_loop_wait_event(VideoState *is, SDL_Event *event) { | |
double remaining_time = 0.0; | |
SDL_PumpEvents(); | |
- while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_ALLEVENTS)) { | |
+ while (!SDL_PeepEvents(event, 1, SDL_GETEVENT, SDL_FIRSTEVENT, SDL_LASTEVENT)) { | |
if (!cursor_hidden && av_gettime_relative() - cursor_last_shown > CURSOR_HIDE_DELAY) { | |
SDL_ShowCursor(0); | |
cursor_hidden = 1; | |
@@ -3476,9 +3389,6 @@ static void event_loop(VideoState *cur_stream) | |
break; | |
} | |
break; | |
- case SDL_VIDEOEXPOSE: | |
- cur_stream->force_refresh = 1; | |
- break; | |
case SDL_MOUSEBUTTONDOWN: | |
if (exit_on_mousedown) { | |
do_exit(cur_stream); | |
@@ -3534,16 +3444,18 @@ static void event_loop(VideoState *cur_stream) | |
stream_seek(cur_stream, ts, 0, 0); | |
} | |
break; | |
- case SDL_VIDEORESIZE: | |
- screen = SDL_SetVideoMode(FFMIN(16383, event.resize.w), event.resize.h, 0, | |
- SDL_HWSURFACE|(is_full_screen?SDL_FULLSCREEN:SDL_RESIZABLE)|SDL_ASYNCBLIT|SDL_HWACCEL); | |
- if (!screen) { | |
- av_log(NULL, AV_LOG_FATAL, "Failed to set video mode\n"); | |
- do_exit(cur_stream); | |
- } | |
- screen_width = cur_stream->width = screen->w; | |
- screen_height = cur_stream->height = screen->h; | |
- cur_stream->force_refresh = 1; | |
+ case SDL_WINDOWEVENT: | |
+ switch (event.window.event) { | |
+ case SDL_WINDOWEVENT_RESIZED: | |
+ screen_width = cur_stream->width = event.window.data1; | |
+ screen_height = cur_stream->height = event.window.data2; | |
+ if (cur_stream->vis_texture) { | |
+ SDL_DestroyTexture(cur_stream->vis_texture); | |
+ cur_stream->vis_texture = NULL; | |
+ } | |
+ case SDL_WINDOWEVENT_EXPOSED: | |
+ cur_stream->force_refresh = 1; | |
+ } | |
break; | |
case SDL_QUIT: | |
case FF_QUIT_EVENT: | |
@@ -3780,7 +3692,6 @@ int main(int argc, char **argv) | |
{ | |
int flags; | |
VideoState *is; | |
- char dummy_videodriver[] = "SDL_VIDEODRIVER=dummy"; | |
char alsa_bufsize[] = "SDL_AUDIO_ALSA_SET_BUFFER_SIZE=1"; | |
av_log_set_flags(AV_LOG_SKIP_REPEATED); | |
@@ -3822,32 +3733,20 @@ int main(int argc, char **argv) | |
else { | |
/* Try to work around an occasional ALSA buffer underflow issue when the | |
* period size is NPOT due to ALSA resampling by forcing the buffer size. */ | |
- if (!SDL_getenv("SDL_AUDIO_ALSA_SET_BUFFER_SIZE")) | |
- SDL_putenv(alsa_bufsize); | |
+ //if (!SDL_getenv("SDL_AUDIO_ALSA_SET_BUFFER_SIZE")) | |
+ // SDL_putenv(alsa_bufsize); | |
} | |
if (display_disable) | |
- SDL_putenv(dummy_videodriver); /* For the event queue, we always need a video driver. */ | |
-#if !defined(_WIN32) && !defined(__APPLE__) | |
- flags |= SDL_INIT_EVENTTHREAD; /* Not supported on Windows or Mac OS X */ | |
-#endif | |
+ flags &= ~SDL_INIT_VIDEO; | |
if (SDL_Init (flags)) { | |
av_log(NULL, AV_LOG_FATAL, "Could not initialize SDL - %s\n", SDL_GetError()); | |
av_log(NULL, AV_LOG_FATAL, "(Did you set the DISPLAY variable?)\n"); | |
exit(1); | |
} | |
- if (!display_disable) { | |
- const SDL_VideoInfo *vi = SDL_GetVideoInfo(); | |
- fs_screen_width = vi->current_w; | |
- fs_screen_height = vi->current_h; | |
- } | |
- | |
- SDL_EventState(SDL_ACTIVEEVENT, SDL_IGNORE); | |
SDL_EventState(SDL_SYSWMEVENT, SDL_IGNORE); | |
SDL_EventState(SDL_USEREVENT, SDL_IGNORE); | |
- SDL_EnableKeyRepeat(SDL_DEFAULT_REPEAT_DELAY, SDL_DEFAULT_REPEAT_INTERVAL); | |
- | |
if (av_lockmgr_register(lockmgr)) { | |
av_log(NULL, AV_LOG_FATAL, "Could not initialize lock manager!\n"); | |
do_exit(NULL); | |
-- | |
2.6.4 (Apple Git-63) | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
From 526e4da8afc9198bcbe86e73e96020e4199fec48 Mon Sep 17 00:00:00 2001 | |
From: Josh de Kock <josh@itanimul.li> | |
Date: Mon, 16 May 2016 02:30:17 +0100 | |
Subject: [PATCH 2/3] avdevice/sdl2: add sdl2 device | |
--- | |
libavdevice/alldevices.c | 1 + | |
libavdevice/sdl2.c | 373 +++++++++++++++++++++++++++++++++++++++++++++++ | |
2 files changed, 374 insertions(+) | |
create mode 100644 libavdevice/sdl2.c | |
diff --git a/libavdevice/alldevices.c b/libavdevice/alldevices.c | |
index 26aecf2..c0a9d9a 100644 | |
--- a/libavdevice/alldevices.c | |
+++ b/libavdevice/alldevices.c | |
@@ -64,6 +64,7 @@ void avdevice_register_all(void) | |
REGISTER_INOUTDEV(PULSE, pulse); | |
REGISTER_INDEV (QTKIT, qtkit); | |
REGISTER_OUTDEV (SDL, sdl); | |
+ REGISTER_OUTDEV (SDL2, sdl2); | |
REGISTER_INOUTDEV(SNDIO, sndio); | |
REGISTER_INOUTDEV(V4L2, v4l2); | |
// REGISTER_INDEV (V4L, v4l | |
diff --git a/libavdevice/sdl2.c b/libavdevice/sdl2.c | |
new file mode 100644 | |
index 0000000..4b0dea0 | |
--- /dev/null | |
+++ b/libavdevice/sdl2.c | |
@@ -0,0 +1,373 @@ | |
+/* | |
+ * Copyright (c) 2011 Stefano Sabatini | |
+ * | |
+ * This file is part of FFmpeg. | |
+ * | |
+ * FFmpeg is free software; you can redistribute it and/or | |
+ * modify it under the terms of the GNU Lesser General Public | |
+ * License as published by the Free Software Foundation; either | |
+ * version 2.1 of the License, or (at your option) any later version. | |
+ * | |
+ * FFmpeg is distributed in the hope that it will be useful, | |
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of | |
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
+ * Lesser General Public License for more details. | |
+ * | |
+ * You should have received a copy of the GNU Lesser General Public | |
+ * License along with FFmpeg; if not, write to the Free Software | |
+ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | |
+ */ | |
+ | |
+/** | |
+ * @file | |
+ * libSDL output device | |
+ */ | |
+ | |
+#include <SDL.h> | |
+#include <SDL_thread.h> | |
+ | |
+#include "libavutil/avstring.h" | |
+#include "libavutil/imgutils.h" | |
+#include "libavutil/opt.h" | |
+#include "libavutil/parseutils.h" | |
+#include "libavutil/pixdesc.h" | |
+#include "libavutil/time.h" | |
+#include "avdevice.h" | |
+ | |
+typedef struct { | |
+ AVClass *class; | |
+ SDL_Window *window; | |
+ SDL_Renderer *renderer; | |
+ char *window_title; | |
+ int window_width, window_height; /**< size of the window */ | |
+ int window_fullscreen; | |
+ int window_borderless; | |
+ | |
+ SDL_Texture *texture; | |
+ int texture_fmt; | |
+ SDL_Rect texture_rect; | |
+ | |
+ int inited; | |
+ SDL_Thread *event_thread; | |
+ SDL_mutex *mutex; | |
+ SDL_cond *init_cond; | |
+ int quit; | |
+} SDLContext; | |
+ | |
+static const struct sdl_texture_pix_fmt_entry { | |
+ enum AVPixelFormat pix_fmt; int texture_fmt; | |
+} sdl_texture_pix_fmt_map[] = { | |
+ { AV_PIX_FMT_RGB8, SDL_PIXELFORMAT_RGB332 }, | |
+ { AV_PIX_FMT_RGB444, SDL_PIXELFORMAT_RGB444 }, | |
+ { AV_PIX_FMT_RGB555, SDL_PIXELFORMAT_RGB555 }, | |
+ { AV_PIX_FMT_BGR555, SDL_PIXELFORMAT_BGR555 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ARGB4444 }, // these aren't implemented in FFmpeg | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_RGBA4444 }, // but keeping them here in-case they | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ABGR4444 }, // are | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_BGRA4444 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ARGB1555 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_RGBA5551 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ABGR1555 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_BGRA5551 }, | |
+ { AV_PIX_FMT_RGB565, SDL_PIXELFORMAT_RGB565 }, | |
+ { AV_PIX_FMT_BGR565, SDL_PIXELFORMAT_BGR565 }, | |
+ { AV_PIX_FMT_RGB24, SDL_PIXELFORMAT_RGB24 }, | |
+ { AV_PIX_FMT_BGR24, SDL_PIXELFORMAT_BGR24 }, | |
+ { AV_PIX_FMT_RGB24, SDL_PIXELFORMAT_RGB888 }, // is this right? | |
+ { AV_PIX_FMT_RGB0, SDL_PIXELFORMAT_RGBX8888 }, | |
+ { AV_PIX_FMT_BGR24, SDL_PIXELFORMAT_BGR888 }, // is this right? | |
+ { AV_PIX_FMT_BGR0, SDL_PIXELFORMAT_BGRX8888 }, | |
+ { AV_PIX_FMT_ARGB, SDL_PIXELFORMAT_ARGB8888 }, | |
+ { AV_PIX_FMT_RGBA, SDL_PIXELFORMAT_RGBA8888 }, | |
+ { AV_PIX_FMT_ABGR, SDL_PIXELFORMAT_ABGR8888 }, | |
+ { AV_PIX_FMT_BGRA, SDL_PIXELFORMAT_BGRA8888 }, | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ARGB2101010 }, // not implemented | |
+ { AV_PIX_FMT_YUV420P, SDL_PIXELFORMAT_IYUV }, | |
+ { AV_PIX_FMT_YUYV422, SDL_PIXELFORMAT_YUY2 }, | |
+ { AV_PIX_FMT_UYVY422, SDL_PIXELFORMAT_UYVY }, | |
+ { AV_PIX_FMT_NONE, 0 }, | |
+}; | |
+ | |
+static void compute_texture_rect(AVFormatContext *s) | |
+{ | |
+ AVRational sar, dar; /* sample and display aspect ratios */ | |
+ SDLContext *sdl = s->priv_data; | |
+ AVStream *st = s->streams[0]; | |
+ AVCodecContext *encctx = st->codec; | |
+ SDL_Rect *texture_rect = &sdl->texture_rect; | |
+ | |
+ /* compute texture width and height from the codec context information */ | |
+ sar = st->sample_aspect_ratio.num ? st->sample_aspect_ratio : (AVRational){ 1, 1 }; | |
+ dar = av_mul_q(sar, (AVRational){ encctx->width, encctx->height }); | |
+ | |
+ /* we suppose the screen has a 1/1 sample aspect ratio */ | |
+ if (sdl->window_width && sdl->window_height) { | |
+ /* fit in the window */ | |
+ if (av_cmp_q(dar, (AVRational){ sdl->window_width, sdl->window_height }) > 0) { | |
+ /* fit in width */ | |
+ texture_rect->w = sdl->window_width; | |
+ texture_rect->h = av_rescale(texture_rect->w, dar.den, dar.num); | |
+ } else { | |
+ /* fit in height */ | |
+ texture_rect->h = sdl->window_height; | |
+ texture_rect->w = av_rescale(texture_rect->h, dar.num, dar.den); | |
+ } | |
+ } else { | |
+ if (sar.num > sar.den) { | |
+ texture_rect->w = encctx->width; | |
+ texture_rect->h = av_rescale(texture_rect->w, dar.den, dar.num); | |
+ } else { | |
+ texture_rect->h = encctx->height; | |
+ texture_rect->w = av_rescale(texture_rect->h, dar.num, dar.den); | |
+ } | |
+ sdl->window_width = texture_rect->w; | |
+ sdl->window_height = texture_rect->h; | |
+ } | |
+ | |
+ texture_rect->x = (sdl->window_width - texture_rect->w) / 2; | |
+ texture_rect->y = (sdl->window_height - texture_rect->h) / 2; | |
+} | |
+ | |
+static int sdl2_write_trailer(AVFormatContext *s) | |
+{ | |
+ SDLContext *sdl = s->priv_data; | |
+ | |
+ sdl->quit = 1; | |
+ | |
+ if (sdl->texture) | |
+ SDL_DestroyTexture(sdl->texture); | |
+ sdl->texture = NULL; | |
+ if (sdl->event_thread) | |
+ SDL_WaitThread(sdl->event_thread, NULL); | |
+ sdl->event_thread = NULL; | |
+ if (sdl->mutex) | |
+ SDL_DestroyMutex(sdl->mutex); | |
+ sdl->mutex = NULL; | |
+ if (sdl->init_cond) | |
+ SDL_DestroyCond(sdl->init_cond); | |
+ sdl->init_cond = NULL; | |
+ | |
+ if (sdl->renderer) | |
+ SDL_DestroyRenderer(sdl->renderer); | |
+ sdl->renderer = NULL; | |
+ | |
+ if (sdl->window) | |
+ SDL_DestroyWindow(sdl->window); | |
+ sdl->window = NULL; | |
+ | |
+ if (!sdl->inited) | |
+ SDL_Quit(); | |
+ | |
+ return 0; | |
+} | |
+ | |
+#define SDL_BASE_FLAGS (SDL_SWSURFACE|SDL_WINDOW_RESIZABLE) | |
+ | |
+static int sdl2_write_header(AVFormatContext *s) | |
+{ | |
+ SDLContext *sdl = s->priv_data; | |
+ AVStream *st = s->streams[0]; | |
+ AVCodecContext *encctx = st->codec; | |
+ int i, ret = 0; | |
+ | |
+ if (!sdl->window_title) | |
+ sdl->window_title = av_strdup(s->filename); | |
+ | |
+ if (SDL_WasInit(SDL_INIT_VIDEO)) { | |
+ av_log(s, AV_LOG_WARNING, | |
+ "SDL video subsystem was already inited, you could have multiple SDL outputs. This may cause unknown behaviour.\n"); | |
+ sdl->inited = 1; | |
+ } | |
+ | |
+ if ( s->nb_streams > 1 | |
+ || encctx->codec_type != AVMEDIA_TYPE_VIDEO | |
+ || encctx->codec_id != AV_CODEC_ID_RAWVIDEO) { | |
+ av_log(s, AV_LOG_ERROR, "Only supports one rawvideo stream\n"); | |
+ goto fail; | |
+ } | |
+ | |
+ for (i = 0; sdl_texture_pix_fmt_map[i].pix_fmt != AV_PIX_FMT_NONE; i++) { | |
+ if (sdl_texture_pix_fmt_map[i].pix_fmt == encctx->pix_fmt) { | |
+ sdl->texture_fmt = sdl_texture_pix_fmt_map[i].texture_fmt; | |
+ break; | |
+ } | |
+ } | |
+ | |
+ if (!sdl->texture_fmt) { | |
+ av_log(s, AV_LOG_ERROR, | |
+ "Unsupported pixel format '%s', choose one of yuv420p, yuyv422, uyvy422, BGRA\n", | |
+ av_get_pix_fmt_name(encctx->pix_fmt)); | |
+ goto fail; | |
+ } | |
+ | |
+ /* resize texture to width and height from the codec context information */ | |
+ | |
+ int flags = SDL_BASE_FLAGS | (sdl->window_fullscreen ? SDL_WINDOW_FULLSCREEN : 0) | | |
+ (sdl->window_borderless ? SDL_WINDOW_BORDERLESS : 0); | |
+ | |
+ /* initialization */ | |
+ if (!sdl->inited){ | |
+ if (SDL_Init(SDL_INIT_VIDEO) != 0) { | |
+ av_log(s, AV_LOG_ERROR, "Unable to initialize SDL: %s\n", SDL_GetError()); | |
+ goto fail; | |
+ } | |
+ } | |
+ | |
+ sdl->window_width = sdl->texture_rect.w = encctx->width; | |
+ sdl->window_height = sdl->texture_rect.h = encctx->height; | |
+ sdl->texture_rect.x = sdl->texture_rect.y = 0; | |
+ | |
+ if (SDL_CreateWindowAndRenderer(sdl->window_width, sdl->window_height, | |
+ flags, &sdl->window, &sdl->renderer) != 0){ | |
+ av_log(sdl, AV_LOG_ERROR, "Couldn't create window and renderer: %s\n", SDL_GetError()); | |
+ goto fail; | |
+ } | |
+ | |
+ SDL_SetWindowTitle(sdl->window, sdl->window_title); | |
+ | |
+ sdl->texture = SDL_CreateTexture(sdl->renderer, sdl->texture_fmt, SDL_TEXTUREACCESS_STREAMING, | |
+ sdl->window_width, sdl->window_height); | |
+ | |
+ if (!sdl->texture) { | |
+ av_log(sdl, AV_LOG_ERROR, "Unable to set create mode: %s\n", SDL_GetError()); | |
+ goto fail; | |
+ } | |
+ | |
+ av_log(s, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s -> w:%d h:%d\n", | |
+ encctx->width, encctx->height, av_get_pix_fmt_name(encctx->pix_fmt), | |
+ sdl->window_width, sdl->window_height); | |
+ | |
+ sdl->inited = 1; | |
+ | |
+ return 0; | |
+fail: | |
+ sdl2_write_trailer(s); | |
+ return ret; | |
+} | |
+ | |
+static int sdl2_write_packet(AVFormatContext *s, AVPacket *pkt) | |
+{ | |
+ int ret = 0; | |
+ SDLContext *sdl = s->priv_data; | |
+ AVCodecContext *encctx = s->streams[0]->codec; | |
+ uint8_t *data[4]; | |
+ int linesize[4]; | |
+ | |
+ SDL_Event event; | |
+ if (SDL_PollEvent(&event)){ | |
+ switch (event.type) { | |
+ case SDL_KEYDOWN: | |
+ switch (event.key.keysym.sym) { | |
+ case SDLK_ESCAPE: | |
+ case SDLK_q: | |
+ sdl->quit = 1; | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ break; | |
+ case SDL_QUIT: | |
+ sdl->quit = 1; | |
+ break; | |
+ case SDL_WINDOWEVENT: | |
+ switch(event.window.event){ | |
+ case SDL_WINDOWEVENT_RESIZED: | |
+ case SDL_WINDOWEVENT_SIZE_CHANGED: | |
+ sdl->window_width = event.window.data1; | |
+ sdl->window_height = event.window.data2; | |
+ compute_texture_rect(s); | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ } | |
+ | |
+ if (sdl->quit) { | |
+ sdl2_write_trailer(s); | |
+ return AVERROR(EIO); | |
+ } | |
+ | |
+ av_image_fill_arrays(data, linesize, pkt->data, encctx->pix_fmt, encctx->width, encctx->height, 1); | |
+ SDL_LockMutex(sdl->mutex); | |
+ switch (sdl->texture_fmt) { | |
+ case SDL_PIXELFORMAT_IYUV: | |
+ case SDL_PIXELFORMAT_YUY2: | |
+ case SDL_PIXELFORMAT_UYVY: | |
+ ret = SDL_UpdateYUVTexture(sdl->texture, NULL, | |
+ data[0], linesize[0], | |
+ data[1], linesize[1], | |
+ data[2], linesize[2]); | |
+ break; | |
+ case SDL_PIXELFORMAT_RGB332: | |
+ case SDL_PIXELFORMAT_RGB444: | |
+ case SDL_PIXELFORMAT_RGB555: | |
+ case SDL_PIXELFORMAT_BGR555: | |
+ // case SDL_PIXELFORMAT_ARGB4444: | |
+ // case SDL_PIXELFORMAT_RGBA4444: | |
+ // case SDL_PIXELFORMAT_ABGR4444: | |
+ // case SDL_PIXELFORMAT_BGRA4444: | |
+ // case SDL_PIXELFORMAT_ARGB1555: | |
+ // case SDL_PIXELFORMAT_RGBA5551: | |
+ // case SDL_PIXELFORMAT_ABGR1555: | |
+ // case SDL_PIXELFORMAT_BGRA5551: | |
+ case SDL_PIXELFORMAT_RGB565: | |
+ case SDL_PIXELFORMAT_BGR565: | |
+ case SDL_PIXELFORMAT_RGB24: | |
+ case SDL_PIXELFORMAT_BGR24: | |
+ case SDL_PIXELFORMAT_RGB888: // is this right? | |
+ case SDL_PIXELFORMAT_RGBX8888: | |
+ case SDL_PIXELFORMAT_BGR888: // is this right? | |
+ case SDL_PIXELFORMAT_BGRX8888: | |
+ case SDL_PIXELFORMAT_ARGB8888: | |
+ case SDL_PIXELFORMAT_RGBA8888: | |
+ case SDL_PIXELFORMAT_ABGR8888: | |
+ case SDL_PIXELFORMAT_BGRA8888: | |
+ // case SDL_PIXELFORMAT_ARGB2101010: | |
+ ret = SDL_UpdateTexture(sdl->texture, NULL, data[0], linesize[0]); | |
+ break; | |
+ default: | |
+ av_log(NULL, AV_LOG_FATAL, "Unsupported pixel format\n"); | |
+ ret = -1; | |
+ break; | |
+ } | |
+ SDL_RenderClear(sdl->renderer); | |
+ SDL_RenderCopy(sdl->renderer, sdl->texture, NULL, &sdl->texture_rect); | |
+ SDL_RenderPresent(sdl->renderer); | |
+ SDL_UnlockMutex(sdl->mutex); | |
+ return ret; | |
+} | |
+ | |
+#define OFFSET(x) offsetof(SDLContext,x) | |
+ | |
+static const AVOption options[] = { | |
+ { "window_title", "set SDL window title", OFFSET(window_title), AV_OPT_TYPE_STRING, { .str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, | |
+ { "window_size", "set SDL window forced size", OFFSET(window_width), AV_OPT_TYPE_IMAGE_SIZE, { .str = NULL }, 0, 0, AV_OPT_FLAG_ENCODING_PARAM }, | |
+ { "window_fullscreen", "set SDL window fullscreen", OFFSET(window_fullscreen), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM }, | |
+ { "window_borderless", "set SDL window border off", OFFSET(window_borderless), AV_OPT_TYPE_INT, { .i64 = 0 }, INT_MIN, INT_MAX, AV_OPT_FLAG_ENCODING_PARAM }, | 
+ { NULL }, | |
+}; | |
+ | |
+static const AVClass sdl2_class = { | |
+ .class_name = "sdl2 outdev", | |
+ .item_name = av_default_item_name, | |
+ .option = options, | |
+ .version = LIBAVUTIL_VERSION_INT, | |
+ .category = AV_CLASS_CATEGORY_DEVICE_VIDEO_OUTPUT, | |
+}; | |
+ | |
+AVOutputFormat ff_sdl2_muxer = { | |
+ .name = "sdl2", | |
+ .long_name = NULL_IF_CONFIG_SMALL("SDL2 output device"), | |
+ .priv_data_size = sizeof(SDLContext), | |
+ .audio_codec = AV_CODEC_ID_NONE, | |
+ .video_codec = AV_CODEC_ID_RAWVIDEO, | |
+ .write_header = sdl2_write_header, | |
+ .write_packet = sdl2_write_packet, | |
+ .write_trailer = sdl2_write_trailer, | |
+ .flags = AVFMT_NOFILE | AVFMT_VARIABLE_FPS | AVFMT_NOTIMESTAMPS, | |
+ .priv_class = &sdl2_class, | |
+}; | |
\ No newline at end of file | |
-- | |
2.6.4 (Apple Git-63) | |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
From 44cfa4ddbe90e7dfa2fab578b66c9612f74b9fc5 Mon Sep 17 00:00:00 2001 | |
From: Josh de Kock <josh@itanimul.li> | |
Date: Tue, 17 May 2016 13:20:14 +0100 | |
Subject: [PATCH 3/3] avdevice/sdl2: re-add threading | |
--- | |
libavdevice/sdl2.c | 276 ++++++++++++++++++++++++++++++++--------------------- | |
1 file changed, 168 insertions(+), 108 deletions(-) | |
diff --git a/libavdevice/sdl2.c b/libavdevice/sdl2.c | |
index 4b0dea0..a185bf4 100644 | |
--- a/libavdevice/sdl2.c | |
+++ b/libavdevice/sdl2.c | |
@@ -39,7 +39,7 @@ typedef struct { | |
SDL_Window *window; | |
SDL_Renderer *renderer; | |
char *window_title; | |
- int window_width, window_height; /**< size of the window */ | |
+ int window_width, window_height; /**< size of the window */ | |
int window_fullscreen; | |
int window_borderless; | |
@@ -47,10 +47,12 @@ typedef struct { | |
int texture_fmt; | |
SDL_Rect texture_rect; | |
- int inited; | |
+ int sdl_was_already_inited; | |
SDL_Thread *event_thread; | |
SDL_mutex *mutex; | |
SDL_cond *init_cond; | |
+ int init_ret; /* return code used to signal initialization errors */ | |
+ int inited; | |
int quit; | |
} SDLContext; | |
@@ -73,19 +75,19 @@ static const struct sdl_texture_pix_fmt_entry { | |
{ AV_PIX_FMT_BGR565, SDL_PIXELFORMAT_BGR565 }, | |
{ AV_PIX_FMT_RGB24, SDL_PIXELFORMAT_RGB24 }, | |
{ AV_PIX_FMT_BGR24, SDL_PIXELFORMAT_BGR24 }, | |
- { AV_PIX_FMT_RGB24, SDL_PIXELFORMAT_RGB888 }, // is this right? | |
+ { AV_PIX_FMT_RGB24, SDL_PIXELFORMAT_RGB888 }, // is this mapping right? | |
{ AV_PIX_FMT_RGB0, SDL_PIXELFORMAT_RGBX8888 }, | |
- { AV_PIX_FMT_BGR24, SDL_PIXELFORMAT_BGR888 }, // is this right? | |
+ { AV_PIX_FMT_BGR24, SDL_PIXELFORMAT_BGR888 }, // is this mapping right? | |
{ AV_PIX_FMT_BGR0, SDL_PIXELFORMAT_BGRX8888 }, | |
{ AV_PIX_FMT_ARGB, SDL_PIXELFORMAT_ARGB8888 }, | |
{ AV_PIX_FMT_RGBA, SDL_PIXELFORMAT_RGBA8888 }, | |
{ AV_PIX_FMT_ABGR, SDL_PIXELFORMAT_ABGR8888 }, | |
{ AV_PIX_FMT_BGRA, SDL_PIXELFORMAT_BGRA8888 }, | |
- // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ARGB2101010 }, // not implemented | |
+ // { AV_PIX_FMT_NONE, SDL_PIXELFORMAT_ARGB2101010 }, // not implemented | |
{ AV_PIX_FMT_YUV420P, SDL_PIXELFORMAT_IYUV }, | |
{ AV_PIX_FMT_YUYV422, SDL_PIXELFORMAT_YUY2 }, | |
{ AV_PIX_FMT_UYVY422, SDL_PIXELFORMAT_UYVY }, | |
- { AV_PIX_FMT_NONE, 0 }, | |
+ { AV_PIX_FMT_NONE, 0 }, /* texture_fmt is int; NULL is a pointer constant */ | 
}; | |
static void compute_texture_rect(AVFormatContext *s) | |
@@ -128,37 +130,103 @@ static void compute_texture_rect(AVFormatContext *s) | |
texture_rect->y = (sdl->window_height - texture_rect->h) / 2; | |
} | |
-static int sdl2_write_trailer(AVFormatContext *s) | 
+static int sdl2_write_trailer(AVFormatContext *s); /* defined below; called from the event thread */ | 
+ | 
+#define SDL_BASE_FLAGS (SDL_SWSURFACE|SDL_WINDOW_RESIZABLE) /* identical redefinition of the macro below; needed before first use */ | 
+ | 
+static int event_thread(void *arg) | 
{ | 
- SDLContext *sdl = s->priv_data; | 
+ AVFormatContext *s = arg; | 
+ SDLContext *sdl = s->priv_data; | 
+ AVCodecContext *encctx = s->streams[0]->codec; | 
+ int flags = SDL_BASE_FLAGS | (sdl->window_fullscreen ? SDL_WINDOW_FULLSCREEN : 0) | | 
+ (sdl->window_borderless ? SDL_WINDOW_BORDERLESS : 0); | 
- sdl->quit = 1; | |
+ /* initialization */ | |
+ if (!sdl->sdl_was_already_inited){ /* only init SDL if write_header found it uninited */ | 
+ if (SDL_Init(SDL_INIT_VIDEO) != 0) { | |
+ av_log(s, AV_LOG_ERROR, "Unable to initialize SDL: %s\n", SDL_GetError()); | |
+ sdl->init_ret = AVERROR(EINVAL); | |
+ goto init_end; | |
+ } | |
+ } | |
- if (sdl->texture) | |
- SDL_DestroyTexture(sdl->texture); | |
- sdl->texture = NULL; | |
- if (sdl->event_thread) | |
- SDL_WaitThread(sdl->event_thread, NULL); | |
- sdl->event_thread = NULL; | |
- if (sdl->mutex) | |
- SDL_DestroyMutex(sdl->mutex); | |
- sdl->mutex = NULL; | |
- if (sdl->init_cond) | |
- SDL_DestroyCond(sdl->init_cond); | |
- sdl->init_cond = NULL; | |
+ sdl->window_width = sdl->texture_rect.w = encctx->width; | |
+ sdl->window_height = sdl->texture_rect.h = encctx->height; | |
+ sdl->texture_rect.x = sdl->texture_rect.y = 0; | |
- if (sdl->renderer) | |
- SDL_DestroyRenderer(sdl->renderer); | |
- sdl->renderer = NULL; | |
+ if (SDL_CreateWindowAndRenderer(sdl->window_width, sdl->window_height, | |
+ flags, &sdl->window, &sdl->renderer) != 0){ | |
+ av_log(sdl, AV_LOG_ERROR, "Couldn't create window and renderer: %s\n", SDL_GetError()); | |
+ sdl->init_ret = AVERROR(EINVAL); | |
+ goto init_end; | |
+ } | |
- if (sdl->window) | |
- SDL_DestroyWindow(sdl->window); | |
- sdl->window = NULL; | |
+ SDL_SetWindowTitle(sdl->window, sdl->window_title); | |
- if (!sdl->inited) | |
- SDL_Quit(); | |
+ sdl->texture = SDL_CreateTexture(sdl->renderer, sdl->texture_fmt, SDL_TEXTUREACCESS_STREAMING, | |
+ sdl->window_width, sdl->window_height); | |
- return 0; | |
+ if (!sdl->texture) { | |
+ av_log(sdl, AV_LOG_ERROR, "Unable to set create mode: %s\n", SDL_GetError()); | |
+ sdl->init_ret = AVERROR(EINVAL); | |
+ goto init_end; | |
+ } | |
+ | |
+ av_log(s, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s -> w:%d h:%d\n", | |
+ encctx->width, encctx->height, av_get_pix_fmt_name(encctx->pix_fmt), | |
+ sdl->window_width, sdl->window_height); | |
+ | |
+init_end: | |
+ | |
+ SDL_LockMutex(sdl->mutex); | |
+ sdl->inited = 1; | |
+ SDL_UnlockMutex(sdl->mutex); | |
+ SDL_CondSignal(sdl->init_cond); | |
+ | |
+ if (sdl->init_ret < 0) | |
+ return sdl->init_ret; | |
+ | |
+ /* event loop */ | |
+ while (!sdl->quit) { | |
+ SDL_Event event; | |
+ int ret = SDL_PollEvent(&event); | |
+ if (ret < 0) { | |
+ av_log(s, AV_LOG_ERROR, "Error when getting SDL event: %s\n", SDL_GetError()); | |
+ continue; | |
+ } | |
+ if (ret == 0) { | |
+ SDL_Delay(10); | |
+ continue; | |
+ } | |
+ switch (event.type) { | |
+ case SDL_KEYDOWN: | |
+ switch (event.key.keysym.sym) { | |
+ case SDLK_ESCAPE: | |
+ case SDLK_q: | |
+ sdl->quit = 1; | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ break; | |
+ case SDL_QUIT: | |
+ sdl->quit = 1; | |
+ break; | |
+ case SDL_WINDOWEVENT: | |
+ switch(event.window.event){ | |
+ case SDL_WINDOWEVENT_RESIZED: | |
+ case SDL_WINDOWEVENT_SIZE_CHANGED: | |
+ sdl->window_width = event.window.data1; | |
+ sdl->window_height = event.window.data2; | |
+ SDL_LockMutex(sdl->mutex); | |
+ compute_texture_rect(s); | |
+ SDL_UnlockMutex(sdl->mutex); | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ break; | |
+ default: | |
+ break; | |
+ } | |
+ } | |
+ | |
+ sdl2_write_trailer(s); | 
+ return 0; /* 'ret' from SDL_PollEvent is scoped to the loop body and unused here */ | 
} | |
#define SDL_BASE_FLAGS (SDL_SWSURFACE|SDL_WINDOW_RESIZABLE) | |
@@ -176,6 +244,7 @@ static int sdl2_write_header(AVFormatContext *s) | |
if (SDL_WasInit(SDL_INIT_VIDEO)) { | |
av_log(s, AV_LOG_WARNING, | |
"SDL video subsystem was already inited, you could have multiple SDL outputs. This may cause unknown behaviour.\n"); | |
- sdl->inited = 1; | 
+ sdl->sdl_was_already_inited = 1; /* warning only: do not set init_ret, multiple outputs should still work */ | 
} | |
@@ -183,6 +252,7 @@ static int sdl2_write_header(AVFormatContext *s) | |
|| encctx->codec_type != AVMEDIA_TYPE_VIDEO | |
|| encctx->codec_id != AV_CODEC_ID_RAWVIDEO) { | |
av_log(s, AV_LOG_ERROR, "Only supports one rawvideo stream\n"); | |
+ sdl->init_ret = AVERROR(EINVAL); | |
goto fail; | |
} | |
@@ -197,49 +267,42 @@ static int sdl2_write_header(AVFormatContext *s) | |
av_log(s, AV_LOG_ERROR, | |
"Unsupported pixel format '%s', choose one of yuv420p, yuyv422, uyvy422, BGRA\n", | |
av_get_pix_fmt_name(encctx->pix_fmt)); | |
+ sdl->init_ret = AVERROR(EINVAL); | |
goto fail; | |
} | |
- /* resize texture to width and height from the codec context information */ | |
- | |
- int flags = SDL_BASE_FLAGS | (sdl->window_fullscreen ? SDL_WINDOW_FULLSCREEN : 0) | | |
- (sdl->window_borderless ? SDL_WINDOW_BORDERLESS : 0); | |
- | |
- /* initialization */ | |
- if (!sdl->inited){ | |
- if (SDL_Init(SDL_INIT_VIDEO) != 0) { | |
- av_log(s, AV_LOG_ERROR, "Unable to initialize SDL: %s\n", SDL_GetError()); | |
- goto fail; | |
- } | |
+ sdl->init_cond = SDL_CreateCond(); | |
+ if (!sdl->init_cond) { | |
+ av_log(s, AV_LOG_ERROR, "Could not create SDL condition variable: %s\n", SDL_GetError()); | |
+ ret = AVERROR_EXTERNAL; | |
+ goto fail; | |
} | |
- | |
- sdl->window_width = sdl->texture_rect.w = encctx->width; | |
- sdl->window_height = sdl->texture_rect.h = encctx->height; | |
- sdl->texture_rect.x = sdl->texture_rect.y = 0; | |
- | |
- if (SDL_CreateWindowAndRenderer(sdl->window_width, sdl->window_height, | |
- flags, &sdl->window, &sdl->renderer) != 0){ | |
- av_log(sdl, AV_LOG_ERROR, "Couldn't create window and renderer: %s\n", SDL_GetError()); | |
+ sdl->mutex = SDL_CreateMutex(); | |
+ if (!sdl->mutex) { | |
+ av_log(s, AV_LOG_ERROR, "Could not create SDL mutex: %s\n", SDL_GetError()); | |
+ ret = AVERROR_EXTERNAL; | |
goto fail; | |
} | |
- | |
- SDL_SetWindowTitle(sdl->window, sdl->window_title); | |
- | |
- sdl->texture = SDL_CreateTexture(sdl->renderer, sdl->texture_fmt, SDL_TEXTUREACCESS_STREAMING, | |
- sdl->window_width, sdl->window_height); | |
- | |
- if (!sdl->texture) { | |
- av_log(sdl, AV_LOG_ERROR, "Unable to set create mode: %s\n", SDL_GetError()); | |
+ sdl->event_thread = SDL_CreateThread(event_thread, "event_thread", s); | |
+ if (!sdl->event_thread) { | |
+ av_log(s, AV_LOG_ERROR, "Could not create SDL event thread: %s\n", SDL_GetError()); | |
+ ret = AVERROR_EXTERNAL; | |
goto fail; | |
} | |
- av_log(s, AV_LOG_VERBOSE, "w:%d h:%d fmt:%s -> w:%d h:%d\n", | |
- encctx->width, encctx->height, av_get_pix_fmt_name(encctx->pix_fmt), | |
- sdl->window_width, sdl->window_height); | |
+ /* wait until the video system has been inited */ | |
+ SDL_LockMutex(sdl->mutex); | |
+ while (!sdl->inited) { | |
+ SDL_CondWait(sdl->init_cond, sdl->mutex); | |
+ } | |
- sdl->inited = 1; | |
- | |
+ SDL_UnlockMutex(sdl->mutex); | |
+ if (sdl->init_ret < 0) { | |
+ ret = sdl->init_ret; | |
+ goto fail; | |
+ } | |
return 0; | |
+ | |
fail: | |
sdl2_write_trailer(s); | |
return ret; | |
@@ -253,44 +316,6 @@ static int sdl2_write_packet(AVFormatContext *s, AVPacket *pkt) | |
uint8_t *data[4]; | |
int linesize[4]; | |
- SDL_Event event; | |
- if (SDL_PollEvent(&event)){ | |
- switch (event.type) { | |
- case SDL_KEYDOWN: | |
- switch (event.key.keysym.sym) { | |
- case SDLK_ESCAPE: | |
- case SDLK_q: | |
- sdl->quit = 1; | |
- break; | |
- default: | |
- break; | |
- } | |
- break; | |
- case SDL_QUIT: | |
- sdl->quit = 1; | |
- break; | |
- case SDL_WINDOWEVENT: | |
- switch(event.window.event){ | |
- case SDL_WINDOWEVENT_RESIZED: | |
- case SDL_WINDOWEVENT_SIZE_CHANGED: | |
- sdl->window_width = event.window.data1; | |
- sdl->window_height = event.window.data2; | |
- compute_texture_rect(s); | |
- break; | |
- default: | |
- break; | |
- } | |
- break; | |
- default: | |
- break; | |
- } | |
- } | |
- | |
- if (sdl->quit) { | |
- sdl2_write_trailer(s); | |
- return AVERROR(EIO); | |
- } | |
- | |
av_image_fill_arrays(data, linesize, pkt->data, encctx->pix_fmt, encctx->width, encctx->height, 1); | |
SDL_LockMutex(sdl->mutex); | |
switch (sdl->texture_fmt) { | |
@@ -306,14 +331,16 @@ static int sdl2_write_packet(AVFormatContext *s, AVPacket *pkt) | |
case SDL_PIXELFORMAT_RGB444: | |
case SDL_PIXELFORMAT_RGB555: | |
case SDL_PIXELFORMAT_BGR555: | |
- // case SDL_PIXELFORMAT_ARGB4444: | |
- // case SDL_PIXELFORMAT_RGBA4444: | |
- // case SDL_PIXELFORMAT_ABGR4444: | |
- // case SDL_PIXELFORMAT_BGRA4444: | |
- // case SDL_PIXELFORMAT_ARGB1555: | |
- // case SDL_PIXELFORMAT_RGBA5551: | |
- // case SDL_PIXELFORMAT_ABGR1555: | |
- // case SDL_PIXELFORMAT_BGRA5551: | |
+ /* Not implemented in FFmpeg | |
+ case SDL_PIXELFORMAT_ARGB4444: | |
+ case SDL_PIXELFORMAT_RGBA4444: | |
+ case SDL_PIXELFORMAT_ABGR4444: | |
+ case SDL_PIXELFORMAT_BGRA4444: | |
+ case SDL_PIXELFORMAT_ARGB1555: | |
+ case SDL_PIXELFORMAT_RGBA5551: | |
+ case SDL_PIXELFORMAT_ABGR1555: | |
+ case SDL_PIXELFORMAT_BGRA5551: | |
+ */ | |
case SDL_PIXELFORMAT_RGB565: | |
case SDL_PIXELFORMAT_BGR565: | |
case SDL_PIXELFORMAT_RGB24: | |
@@ -341,6 +368,39 @@ static int sdl2_write_packet(AVFormatContext *s, AVPacket *pkt) | |
return ret; | |
} | |
+static int sdl2_write_trailer(AVFormatContext *s) | |
+{ | |
+ SDLContext *sdl = s->priv_data; | |
+ | |
+ sdl->quit = 1; | |
+ | |
+ if (sdl->texture) | |
+ SDL_DestroyTexture(sdl->texture); | |
+ sdl->texture = NULL; | |
+ if (sdl->event_thread) | |
+ SDL_WaitThread(sdl->event_thread, NULL); | |
+ sdl->event_thread = NULL; | |
+ if (sdl->mutex) | |
+ SDL_DestroyMutex(sdl->mutex); | |
+ sdl->mutex = NULL; | |
+ if (sdl->init_cond) | |
+ SDL_DestroyCond(sdl->init_cond); | |
+ sdl->init_cond = NULL; | |
+ | |
+ if (sdl->renderer) | |
+ SDL_DestroyRenderer(sdl->renderer); | |
+ sdl->renderer = NULL; | |
+ | |
+ if (sdl->window) | |
+ SDL_DestroyWindow(sdl->window); | |
+ sdl->window = NULL; | |
+ | |
+ if (!sdl->sdl_was_already_inited) /* only SDL_Quit() if this device inited SDL itself */ | 
+ SDL_Quit(); | |
+ | |
+ return 0; | |
+} | |
+ | |
#define OFFSET(x) offsetof(SDLContext,x) | |
static const AVOption options[] = { | |
-- | |
2.6.4 (Apple Git-63) | |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment