@lu-zero
Created November 30, 2011 10:57
Some corrections for you
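For context (this note and the sketch below are editorial additions, not part of the patch): the file being tidied up implements FFmpeg's YUV4MPEG2 muxer and demuxer. The container is just a one-line text header followed, for each picture, by a "FRAME" marker and the raw Y, Cb and Cr planes, which is what yuv4_generate_header() and yuv4_write_packet() emit. A minimal standalone sketch, using no FFmpeg API, that writes one mid-gray 4:2:0 frame in that layout:

    /* Hypothetical standalone example: write a single mid-gray 4:2:0 frame
     * in the YUV4MPEG2 layout (header line, then FRAME marker plus planes). */
    #include <stdio.h>
    #include <string.h>

    #define W 320
    #define H 240

    static unsigned char y[W * H];              /* full-resolution luma     */
    static unsigned char cb[(W / 2) * (H / 2)]; /* 2x2-subsampled chroma    */
    static unsigned char cr[(W / 2) * (H / 2)];

    int main(void)
    {
        FILE *f = fopen("gray.y4m", "wb");

        if (!f)
            return 1;

        memset(y,  128, sizeof(y));
        memset(cb, 128, sizeof(cb));
        memset(cr, 128, sizeof(cr));

        /* Stream header: width, height, frame rate, interlacing,
         * pixel aspect and chroma siting / colorspace tag. */
        fprintf(f, "YUV4MPEG2 W%d H%d F25:1 Ip A1:1 C420jpeg\n", W, H);

        /* One picture: marker line, then the three planes back to back. */
        fprintf(f, "FRAME\n");
        fwrite(y,  1, sizeof(y),  f);
        fwrite(cb, 1, sizeof(cb), f);
        fwrite(cr, 1, sizeof(cr), f);

        fclose(f);
        return 0;
    }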
diff --git a/libavformat/yuv4mpeg.c b/libavformat/yuv4mpeg.c
index e1db1a3..f80d2bc 100644
--- a/libavformat/yuv4mpeg.c
+++ b/libavformat/yuv4mpeg.c
@@ -42,7 +42,8 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
width = st->codec->width;
height = st->codec->height;
- av_reduce(&raten, &rated, st->codec->time_base.den, st->codec->time_base.num, (1UL<<31)-1);
+ av_reduce(&raten, &rated, st->codec->time_base.den,
+ st->codec->time_base.num, (1UL << 31) - 1);
aspectn = st->sample_aspect_ratio.num;
aspectd = st->sample_aspect_ratio.den;
@@ -62,9 +63,12 @@ static int yuv4_generate_header(AVFormatContext *s, char* buf)
colorspace = " C411 XYSCSS=411";
break;
case PIX_FMT_YUV420P:
- colorspace = (st->codec->chroma_sample_location == AVCHROMA_LOC_TOPLEFT)?" C420paldv XYSCSS=420PALDV":
- (st->codec->chroma_sample_location == AVCHROMA_LOC_LEFT) ?" C420mpeg2 XYSCSS=420MPEG2":
- " C420jpeg XYSCSS=420JPEG";
+ colorspace =
+ (st->codec->chroma_sample_location == AVCHROMA_LOC_TOPLEFT)?
+ " C420paldv XYSCSS=420PALDV":
+ (st->codec->chroma_sample_location == AVCHROMA_LOC_LEFT)?
+ " C420mpeg2 XYSCSS=420MPEG2":
+ " C420jpeg XYSCSS=420JPEG";
break;
case PIX_FMT_YUV422P:
colorspace = " C422 XYSCSS=422";
@@ -95,7 +99,7 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
int* first_pkt = s->priv_data;
int width, height, h_chroma_shift, v_chroma_shift;
int i;
- char buf2[Y4M_LINE_MAX+1];
+ char buf2[Y4M_LINE_MAX + 1];
char buf1[20];
uint8_t *ptr, *ptr1, *ptr2;
@@ -105,7 +109,8 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
if (*first_pkt) {
*first_pkt = 0;
if (yuv4_generate_header(s, buf2) < 0) {
- av_log(s, AV_LOG_ERROR, "Error. YUV4MPEG stream header write failed.\n");
+ av_log(s, AV_LOG_ERROR,
+ "Error. YUV4MPEG stream header write failed.\n");
return AVERROR(EIO);
} else {
avio_write(pb, buf2, strlen(buf2));
@@ -121,26 +126,27 @@ static int yuv4_write_packet(AVFormatContext *s, AVPacket *pkt)
height = st->codec->height;
ptr = picture->data[0];
- for(i=0;i<height;i++) {
+ for (i = 0; i < height; i++) {
avio_write(pb, ptr, width);
ptr += picture->linesize[0];
}
if (st->codec->pix_fmt != PIX_FMT_GRAY8){
// Adjust for smaller Cb and Cr planes
- avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift, &v_chroma_shift);
+ avcodec_get_chroma_sub_sample(st->codec->pix_fmt, &h_chroma_shift,
+ &v_chroma_shift);
width >>= h_chroma_shift;
height >>= v_chroma_shift;
ptr1 = picture->data[1];
ptr2 = picture->data[2];
- for(i=0;i<height;i++) { /* Cb */
+ for (i = 0; i < height; i++) { /* Cb */
avio_write(pb, ptr1, width);
ptr1 += picture->linesize[1];
}
- for(i=0;i<height;i++) { /* Cr */
+ for (i = 0; i < height; i++) { /* Cr */
avio_write(pb, ptr2, width);
- ptr2 += picture->linesize[2];
+ ptr2 += picture->linesize[2];
}
}
avio_flush(pb);
@@ -155,13 +161,17 @@ static int yuv4_write_header(AVFormatContext *s)
return AVERROR(EIO);
if (s->streams[0]->codec->pix_fmt == PIX_FMT_YUV411P) {
- av_log(s, AV_LOG_ERROR, "Warning: generating rarely used 4:1:1 YUV stream, some mjpegtools might not work.\n");
+ av_log(s, AV_LOG_ERROR,
+ "Warning: generating rarely used 4:1:1 YUV stream, "
+ "some mjpegtools might not work.\n");
}
else if ((s->streams[0]->codec->pix_fmt != PIX_FMT_YUV420P) &&
(s->streams[0]->codec->pix_fmt != PIX_FMT_YUV422P) &&
(s->streams[0]->codec->pix_fmt != PIX_FMT_GRAY8) &&
(s->streams[0]->codec->pix_fmt != PIX_FMT_YUV444P)) {
- av_log(s, AV_LOG_ERROR, "ERROR: yuv4mpeg only handles yuv444p, yuv422p, yuv420p, yuv411p and gray pixel formats. Use -pix_fmt to select one.\n");
+ av_log(s, AV_LOG_ERROR,
+ "ERROR: yuv4mpeg only handles yuv444p, yuv422p, yuv420p, "
+ "yuv411p and gray pixel formats. Use -pix_fmt to select one.\n");
return AVERROR(EIO);
}
@@ -189,21 +199,22 @@ AVOutputFormat ff_yuv4mpegpipe_muxer = {
static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
{
- char header[MAX_YUV4_HEADER+10]; // Include headroom for the longest option
- char *tokstart,*tokend,*header_end;
+ char header[MAX_YUV4_HEADER + 10]; // Include headroom for the longest option
+ char *tokstart, *tokend, *header_end;
int i;
AVIOContext *pb = s->pb;
- int width=-1, height=-1, raten=0, rated=0, aspectn=0, aspectd=0;
- enum PixelFormat pix_fmt=PIX_FMT_NONE,alt_pix_fmt=PIX_FMT_NONE;
+ int width = -1, height = -1, raten = 0, rated = 0, aspectn = 0, aspectd = 0;
+ enum PixelFormat pix_fmt = PIX_FMT_NONE, alt_pix_fmt = PIX_FMT_NONE;
enum AVChromaLocation chroma_sample_location = AVCHROMA_LOC_UNSPECIFIED;
AVStream *st;
struct frame_attributes *s1 = s->priv_data;
- for (i=0; i<MAX_YUV4_HEADER; i++) {
+ for (i = 0; i < MAX_YUV4_HEADER; i++) {
header[i] = avio_r8(pb);
if (header[i] == '\n') {
- header[i+1] = 0x20; // Add a space after last option. Makes parsing "444" vs "444alpha" easier.
- header[i+2] = 0;
+ header[i + 1] = 0x20; // Add a space after last option.
+ // Makes parsing "444" vs "444alpha" easier.
+ header[i + 2] = 0;
break;
}
}
@@ -212,62 +223,67 @@ static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
s1->interlaced_frame = 0;
s1->top_field_first = 0;
- header_end = &header[i+1]; // Include space
- for(tokstart = &header[strlen(Y4M_MAGIC) + 1]; tokstart < header_end; tokstart++) {
- if (*tokstart==0x20) continue;
+ header_end = &header[i + 1]; // Include space
+ for (tokstart = &header[strlen(Y4M_MAGIC) + 1];
+ tokstart < header_end; tokstart++) {
+ if (*tokstart == 0x20) continue;
switch (*tokstart++) {
case 'W': // Width. Required.
width = strtol(tokstart, &tokend, 10);
- tokstart=tokend;
+ tokstart = tokend;
break;
case 'H': // Height. Required.
height = strtol(tokstart, &tokend, 10);
- tokstart=tokend;
+ tokstart = tokend;
break;
case 'C': // Color space
- if (strncmp("420jpeg",tokstart,7)==0) {
+ if (strncmp("420jpeg", tokstart, 7) == 0) {
pix_fmt = PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_CENTER;
- } else if (strncmp("420mpeg2",tokstart,8)==0) {
+ } else if (strncmp("420mpeg2", tokstart, 8) == 0) {
pix_fmt = PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_LEFT;
- } else if (strncmp("420paldv", tokstart, 8)==0) {
+ } else if (strncmp("420paldv", tokstart, 8) == 0) {
pix_fmt = PIX_FMT_YUV420P;
chroma_sample_location = AVCHROMA_LOC_TOPLEFT;
- } else if (strncmp("411", tokstart, 3)==0)
+ } else if (strncmp("411", tokstart, 3) == 0)
pix_fmt = PIX_FMT_YUV411P;
- else if (strncmp("422", tokstart, 3)==0)
+ else if (strncmp("422", tokstart, 3) == 0)
pix_fmt = PIX_FMT_YUV422P;
- else if (strncmp("444alpha", tokstart, 8)==0) {
- av_log(s, AV_LOG_ERROR, "Cannot handle 4:4:4:4 YUV4MPEG stream.\n");
+ else if (strncmp("444alpha", tokstart, 8) ==0 ) {
+ av_log(s, AV_LOG_ERROR,
+ "Cannot handle 4:4:4:4 YUV4MPEG stream.\n");
return -1;
- } else if (strncmp("444", tokstart, 3)==0)
+ } else if (strncmp("444", tokstart, 3) == 0)
pix_fmt = PIX_FMT_YUV444P;
- else if (strncmp("mono",tokstart, 4)==0) {
+ else if (strncmp("mono", tokstart, 4) == 0) {
pix_fmt = PIX_FMT_GRAY8;
} else {
- av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains an unknown pixel format.\n");
+ av_log(s, AV_LOG_ERROR,
+ "YUV4MPEG stream contains an unknown pixel format.\n");
return -1;
}
- while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+ while (tokstart < header_end && *tokstart != 0x20) tokstart++;
break;
case 'I': // Interlace type
switch (*tokstart++){
case '?':
break;
case 'p':
- s1->interlaced_frame=0;
+ s1->interlaced_frame = 0;
break;
case 't':
- s1->interlaced_frame=1;
- s1->top_field_first=1;
+ s1->interlaced_frame = 1;
+ s1->top_field_first = 1;
break;
case 'b':
- s1->interlaced_frame=1;
- s1->top_field_first=0;
+ s1->interlaced_frame = 1;
+ s1->top_field_first = 0;
break;
case 'm':
- av_log(s, AV_LOG_ERROR, "YUV4MPEG stream contains mixed interlaced and non-interlaced frames.\n");
+ av_log(s, AV_LOG_ERROR,
+ "YUV4MPEG stream contains mixed interlaced "
+ "and non-interlaced frames.\n");
return -1;
default:
av_log(s, AV_LOG_ERROR, "YUV4MPEG has invalid header.\n");
@@ -275,31 +291,31 @@ static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
}
break;
case 'F': // Frame rate
- sscanf(tokstart,"%d:%d",&raten,&rated); // 0:0 if unknown
- while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+ sscanf(tokstart, "%d:%d", &raten, &rated); // 0:0 if unknown
+ while (tokstart < header_end && *tokstart != 0x20) tokstart++;
break;
case 'A': // Pixel aspect
- sscanf(tokstart,"%d:%d",&aspectn,&aspectd); // 0:0 if unknown
- while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+ sscanf(tokstart, "%d:%d", &aspectn, &aspectd); // 0:0 if unknown
+ while (tokstart < header_end && *tokstart != 0x20) tokstart++;
break;
case 'X': // Vendor extensions
- if (strncmp("YSCSS=",tokstart,6)==0) {
+ if (strncmp("YSCSS=", tokstart, 6) == 0) {
// Older nonstandard pixel format representation
- tokstart+=6;
- if (strncmp("420JPEG",tokstart,7)==0)
- alt_pix_fmt=PIX_FMT_YUV420P;
- else if (strncmp("420MPEG2",tokstart,8)==0)
- alt_pix_fmt=PIX_FMT_YUV420P;
- else if (strncmp("420PALDV",tokstart,8)==0)
- alt_pix_fmt=PIX_FMT_YUV420P;
- else if (strncmp("411",tokstart,3)==0)
- alt_pix_fmt=PIX_FMT_YUV411P;
- else if (strncmp("422",tokstart,3)==0)
- alt_pix_fmt=PIX_FMT_YUV422P;
- else if (strncmp("444",tokstart,3)==0)
- alt_pix_fmt=PIX_FMT_YUV444P;
+ tokstart += 6;
+ if (strncmp("420JPEG", tokstart, 7) == 0)
+ alt_pix_fmt = PIX_FMT_YUV420P;
+ else if (strncmp("420MPEG2", tokstart, 8) == 0)
+ alt_pix_fmt = PIX_FMT_YUV420P;
+ else if (strncmp("420PALDV", tokstart, 8) == 0)
+ alt_pix_fmt = PIX_FMT_YUV420P;
+ else if (strncmp("411", tokstart, 3) == 0)
+ alt_pix_fmt = PIX_FMT_YUV411P;
+ else if (strncmp("422", tokstart, 3) == 0)
+ alt_pix_fmt = PIX_FMT_YUV422P;
+ else if (strncmp("444", tokstart, 3) == 0)
+ alt_pix_fmt = PIX_FMT_YUV444P;
}
- while(tokstart<header_end&&*tokstart!=0x20) tokstart++;
+ while (tokstart < header_end && *tokstart != 0x20) tokstart++;
break;
}
}
@@ -328,16 +344,16 @@ static int yuv4_read_header(AVFormatContext *s, AVFormatParameters *ap)
}
st = avformat_new_stream(s, NULL);
- if(!st)
+ if (!st)
return AVERROR(ENOMEM);
st->codec->width = width;
st->codec->height = height;
- av_reduce(&raten, &rated, raten, rated, (1UL<<31)-1);
+ av_reduce(&raten, &rated, raten, rated, (1UL << 31) - 1);
av_set_pts_info(st, 64, rated, raten);
st->codec->pix_fmt = pix_fmt;
st->codec->codec_type = AVMEDIA_TYPE_VIDEO;
st->codec->codec_id = CODEC_ID_RAWVIDEO;
- st->sample_aspect_ratio= (AVRational){aspectn, aspectd};
+ st->sample_aspect_ratio = (AVRational){aspectn, aspectd};
st->codec->chroma_sample_location = chroma_sample_location;
return 0;
@@ -351,10 +367,10 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
AVStream *st = s->streams[0];
struct frame_attributes *s1 = s->priv_data;
- for (i=0; i<MAX_FRAME_HEADER; i++) {
+ for (i = 0; i < MAX_FRAME_HEADER; i++) {
header[i] = avio_r8(s->pb);
if (header[i] == '\n') {
- header[i+1] = 0;
+ header[i + 1] = 0;
break;
}
}
@@ -371,9 +387,9 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
if (av_get_packet(s->pb, pkt, packet_size) != packet_size)
return AVERROR(EIO);
- if (s->streams[0]->codec->coded_frame) {
- s->streams[0]->codec->coded_frame->interlaced_frame = s1->interlaced_frame;
- s->streams[0]->codec->coded_frame->top_field_first = s1->top_field_first;
+ if (st->codec->coded_frame) {
+ st->codec->coded_frame->interlaced_frame = s1->interlaced_frame;
+ st->codec->coded_frame->top_field_first = s1->top_field_first;
}
pkt->stream_index = 0;
@@ -383,7 +399,7 @@ static int yuv4_read_packet(AVFormatContext *s, AVPacket *pkt)
static int yuv4_probe(AVProbeData *pd)
{
/* check file header */
- if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC)-1)==0)
+ if (strncmp(pd->buf, Y4M_MAGIC, sizeof(Y4M_MAGIC) - 1) == 0)
return AVPROBE_SCORE_MAX;
else
return 0;
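
On the demuxer side, the switch being reformatted above scans the space-separated options of that same header line, keying on the first letter of each token. A hypothetical standalone sketch of the same tokenizing idea (the names here are mine, not FFmpeg's):

    /* Hypothetical standalone example of the Y4M header tokenizer pattern:
     * skip spaces, dispatch on the tag letter, then skip to the next space,
     * mirroring the switch on *tokstart++ in yuv4_read_header(). */
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    int main(void)
    {
        const char *header = "YUV4MPEG2 W720 H480 F30000:1001 Ip A1:1 C420mpeg2";
        const char *tok;
        int width = 0, height = 0, raten = 0, rated = 0;
        char colorspace[16] = "";

        for (tok = header + strlen("YUV4MPEG2"); *tok; tok++) {
            if (*tok == ' ')
                continue;
            switch (*tok++) {               /* tag letter, then its value  */
            case 'W': width  = atoi(tok);                    break;
            case 'H': height = atoi(tok);                    break;
            case 'F': sscanf(tok, "%d:%d", &raten, &rated);  break;
            case 'C': sscanf(tok, "%15s", colorspace);       break;
            default:  break;                /* I, A, X: ignored here       */
            }
            while (*tok && *tok != ' ')     /* advance to the next option  */
                tok++;
            tok--;                          /* compensate the loop's tok++ */
        }

        printf("%dx%d, %d:%d fps, C%s\n", width, height, raten, rated, colorspace);
        return 0;
    }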