@rajatsaxena
Last active October 7, 2019 20:36
raspivid test script, modified to also log CLOCK_REALTIME (clock_gettime) and gettimeofday timestamps for each frame
/*
Copyright (c) 2013, Broadcom Europe Ltd
Copyright (c) 2013, James Hughes
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
/**
* \file RaspiVid.c
* Command line program to capture a camera video stream and encode it to file.
* Also optionally display a preview/viewfinder of current camera input.
*
* \date 28th Feb 2013
* \Author: James Hughes
*
* Description
*
* Three components are created: camera, preview and video encoder.
* The camera component has three ports: preview, video and stills.
* This program connects the camera's preview and video ports to the preview
* renderer and the video encoder respectively. Using MMAL we don't need to
* worry about buffers between these components, but we do need to handle
* buffers from the encoder, which are simply written straight to the file in
* the requisite buffer callback.
*
* If raw option is selected, a video splitter component is connected between
* camera and preview. This allows us to set up callback for raw camera data
* (in YUV420 or RGB format) which might be useful for further image processing.
*
* We use the RaspiCamControl code to handle the specific camera settings.
* We use the RaspiPreview code to handle the (generic) preview window
*/
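/*
 * Rough data flow, summarising the description above:
 *
 *   camera video port   -> video encoder -> encoder_buffer_callback() -> encoded output file
 *   camera preview port -> preview renderer                (no raw output requested)
 *   camera preview port -> splitter -> preview renderer    (raw output requested)
 *                                   -> splitter_buffer_callback() -> raw file
 */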
// We use some GNU extensions (basename)
#define _GNU_SOURCE
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <memory.h>
#include <sysexits.h>
#include <time.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#define VERSION_STRING "v1.3.12"
#include "bcm_host.h"
#include "interface/vcos/vcos.h"
#include "interface/mmal/mmal.h"
#include "interface/mmal/mmal_logging.h"
#include "interface/mmal/mmal_buffer.h"
#include "interface/mmal/util/mmal_util.h"
#include "interface/mmal/util/mmal_util_params.h"
#include "interface/mmal/util/mmal_default_components.h"
#include "interface/mmal/util/mmal_connection.h"
#include "RaspiCamControl.h"
#include "RaspiPreview.h"
#include "RaspiCLI.h"
#include <semaphore.h>
// Standard port setting for the camera component
#define MMAL_CAMERA_PREVIEW_PORT 0
#define MMAL_CAMERA_VIDEO_PORT 1
#define MMAL_CAMERA_CAPTURE_PORT 2
// Port configuration for the splitter component
#define SPLITTER_OUTPUT_PORT 0
#define SPLITTER_PREVIEW_PORT 1
// Video format information
// 0 implies variable
#define VIDEO_FRAME_RATE_NUM 30
#define VIDEO_FRAME_RATE_DEN 1
/// Video render needs at least 2 buffers.
#define VIDEO_OUTPUT_BUFFERS_NUM 3
// Max bitrate we allow for recording
const int MAX_BITRATE_MJPEG = 25000000; // 25Mbits/s
const int MAX_BITRATE_LEVEL4 = 25000000; // 25Mbits/s
const int MAX_BITRATE_LEVEL42 = 62500000; // 62.5Mbits/s
/// Interval at which we check for a failure abort during capture
const int ABORT_INTERVAL = 100; // ms
/// Capture/Pause switch method
/// Simply capture for time specified
#define WAIT_METHOD_NONE 0
/// Cycle between capture and pause for times specified
#define WAIT_METHOD_TIMED 1
/// Switch between capture and pause on keypress
#define WAIT_METHOD_KEYPRESS 2
/// Switch between capture and pause on signal
#define WAIT_METHOD_SIGNAL 3
/// Run/record forever
#define WAIT_METHOD_FOREVER 4
int mmal_status_to_int(MMAL_STATUS_T status);
static void signal_handler(int signal_number);
// Forward
typedef struct RASPIVID_STATE_S RASPIVID_STATE;
/** Struct used to pass information in encoder port userdata to callback
*/
typedef struct
{
FILE *file_handle; /// File handle to write buffer data to.
RASPIVID_STATE *pstate; /// pointer to our state in case required in callback
int abort; /// Set to 1 in callback if an error occurs to attempt to abort the capture
char *cb_buff; /// Circular buffer
int cb_len; /// Length of buffer
int cb_wptr; /// Current write pointer
int cb_wrap; /// Has buffer wrapped at least once?
int cb_data; /// Valid bytes in buffer
#define IFRAME_BUFSIZE (60*1000)
int iframe_buff[IFRAME_BUFSIZE]; /// buffer of iframe pointers
int iframe_buff_wpos;
int iframe_buff_rpos;
char header_bytes[29];
int header_wptr;
FILE *imv_file_handle; /// File handle to write inline motion vectors to.
FILE *raw_file_handle; /// File handle to write raw data to.
int flush_buffers;
FILE *pts_file_handle; /// File timestamps
FILE *clockrt_file_handle; /// CLOCK Real Time file timestamps
FILE *datetime_file_handle; /// datetime file timestamps
} PORT_USERDATA;
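/*
 * Note: cb_buff/cb_len/cb_wptr/cb_wrap and the iframe_buff index are only used
 * in circular-buffer mode (-circular). encoder_buffer_callback() keeps a ring
 * of encoded data plus the byte offsets of recent keyframes within that ring,
 * so the eventual save can start cleanly from an I-frame.
 */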
/** Possible raw output formats
*/
typedef enum {
RAW_OUTPUT_FMT_YUV = 1,
RAW_OUTPUT_FMT_RGB,
RAW_OUTPUT_FMT_GRAY,
} RAW_OUTPUT_FMT;
/** Structure containing all state information for the current run
*/
struct RASPIVID_STATE_S
{
int timeout; /// Time taken before frame is grabbed and app then shuts down. Units are milliseconds
int width; /// Requested width of image
int height; /// requested height of image
MMAL_FOURCC_T encoding; /// Requested codec video encoding (MJPEG or H264)
int bitrate; /// Requested bitrate
int framerate; /// Requested frame rate (fps)
int intraperiod; /// Intra-refresh period (key frame rate)
int quantisationParameter; /// Quantisation parameter - quality. Set bitrate 0 and set this for variable bitrate
int bInlineHeaders; /// Insert inline headers to stream (SPS, PPS)
char *filename; /// filename of output file
int verbose; /// !0 if want detailed run information
int demoMode; /// Run app in demo mode
int demoInterval; /// Interval between camera settings changes
int immutableInput; /// Flag to specify whether encoder works in place or creates a new buffer. Result is preview can display either
/// the camera output or the encoder output (with compression artifacts)
int profile; /// H264 profile to use for encoding
int level; /// H264 level to use for encoding
int waitMethod; /// Method for switching between pause and capture
int onTime; /// In timed cycle mode, the amount of time the capture is on per cycle
int offTime; /// In timed cycle mode, the amount of time the capture is off per cycle
int segmentSize; /// Length of each segment in ms when segmenting the output file (0 = no segmenting)
int segmentWrap; /// Point at which to wrap segment counter
int segmentNumber; /// Current segment counter
int splitNow; /// Split at next possible i-frame if set to 1.
int splitWait; /// Switch if user wants split files
RASPIPREVIEW_PARAMETERS preview_parameters; /// Preview setup parameters
RASPICAM_CAMERA_PARAMETERS camera_parameters; /// Camera setup parameters
MMAL_COMPONENT_T *camera_component; /// Pointer to the camera component
MMAL_COMPONENT_T *splitter_component; /// Pointer to the splitter component
MMAL_COMPONENT_T *encoder_component; /// Pointer to the encoder component
MMAL_CONNECTION_T *preview_connection; /// Pointer to the connection from camera or splitter to preview
MMAL_CONNECTION_T *splitter_connection;/// Pointer to the connection from camera to splitter
MMAL_CONNECTION_T *encoder_connection; /// Pointer to the connection from camera to encoder
MMAL_POOL_T *splitter_pool; /// Pointer to the pool of buffers used by splitter output port 0
MMAL_POOL_T *encoder_pool; /// Pointer to the pool of buffers used by encoder output port
PORT_USERDATA callback_data; /// Used to move data to the encoder callback
int bCapturing; /// State of capture/pause
int bCircularBuffer; /// Whether we are writing to a circular buffer
int inlineMotionVectors; /// Encoder outputs inline Motion Vectors
char *imv_filename; /// filename of inline Motion Vectors output
int raw_output; /// Output raw video from camera as well
RAW_OUTPUT_FMT raw_output_fmt; /// The raw video format
char *raw_filename; /// Filename for raw video output
int cameraNum; /// Camera number
int settings; /// Request settings from the camera
int sensor_mode; /// Sensor mode. 0=auto. Check docs/forum for modes selected by other values.
int intra_refresh_type; /// What intra refresh type to use. -1 to not set.
int frame;
char *pts_filename;
int save_pts;
int64_t starttime;
int64_t lasttime;
char *clock_realtime_filename;
int save_clock_realtime;
char *datetime_linux_filename;
int save_datetime_linux;
};
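/*
 * The pts/clock_realtime/datetime fields at the end of this struct carry the
 * extra per-frame timestamp logging this gist adds on top of stock RaspiVid:
 * an output filename and an enable flag for each timestamp source, plus the
 * frame counter and first/last-PTS bookkeeping used when writing them out.
 */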
/// Structure to cross reference H264 profile strings against the MMAL parameter equivalent
static XREF_T profile_map[] =
{
{"baseline", MMAL_VIDEO_PROFILE_H264_BASELINE},
{"main", MMAL_VIDEO_PROFILE_H264_MAIN},
{"high", MMAL_VIDEO_PROFILE_H264_HIGH},
// {"constrained", MMAL_VIDEO_PROFILE_H264_CONSTRAINED_BASELINE} // Does anyone need this?
};
static int profile_map_size = sizeof(profile_map) / sizeof(profile_map[0]);
/// Structure to cross reference H264 level strings against the MMAL parameter equivalent
static XREF_T level_map[] =
{
{"4", MMAL_VIDEO_LEVEL_H264_4},
{"4.1", MMAL_VIDEO_LEVEL_H264_41},
{"4.2", MMAL_VIDEO_LEVEL_H264_42},
};
static int level_map_size = sizeof(level_map) / sizeof(level_map[0]);
static XREF_T initial_map[] =
{
{"record", 0},
{"pause", 1},
};
static int initial_map_size = sizeof(initial_map) / sizeof(initial_map[0]);
static XREF_T intra_refresh_map[] =
{
{"cyclic", MMAL_VIDEO_INTRA_REFRESH_CYCLIC},
{"adaptive", MMAL_VIDEO_INTRA_REFRESH_ADAPTIVE},
{"both", MMAL_VIDEO_INTRA_REFRESH_BOTH},
{"cyclicrows", MMAL_VIDEO_INTRA_REFRESH_CYCLIC_MROWS},
// {"random", MMAL_VIDEO_INTRA_REFRESH_PSEUDO_RAND} Cannot use random, crashes the encoder. No idea why.
};
static int intra_refresh_map_size = sizeof(intra_refresh_map) / sizeof(intra_refresh_map[0]);
static XREF_T raw_output_fmt_map[] =
{
{"yuv", RAW_OUTPUT_FMT_YUV},
{"rgb", RAW_OUTPUT_FMT_RGB},
{"gray", RAW_OUTPUT_FMT_GRAY},
};
static int raw_output_fmt_map_size = sizeof(raw_output_fmt_map) / sizeof(raw_output_fmt_map[0]);
static void display_valid_parameters(char *app_name);
/// Command ID's and Structure defining our command line options
#define CommandHelp 0
#define CommandWidth 1
#define CommandHeight 2
#define CommandBitrate 3
#define CommandOutput 4
#define CommandVerbose 5
#define CommandTimeout 6
#define CommandDemoMode 7
#define CommandFramerate 8
#define CommandPreviewEnc 9
#define CommandIntraPeriod 10
#define CommandProfile 11
#define CommandTimed 12
#define CommandSignal 13
#define CommandKeypress 14
#define CommandInitialState 15
#define CommandQP 16
#define CommandInlineHeaders 17
#define CommandSegmentFile 18
#define CommandSegmentWrap 19
#define CommandSegmentStart 20
#define CommandSplitWait 21
#define CommandCircular 22
#define CommandIMV 23
#define CommandCamSelect 24
#define CommandSettings 25
#define CommandSensorMode 26
#define CommandIntraRefreshType 27
#define CommandFlush 28
#define CommandSavePTS 29
#define CommandCodec 30
#define CommandLevel 31
#define CommandRaw 32
#define CommandRawFormat 33
#define CommandSaveClockRealtime 34
#define CommandSaveDateTimeLinux 35
static COMMAND_LIST cmdline_commands[] =
{
{ CommandHelp, "-help", "?", "This help information", 0 },
{ CommandWidth, "-width", "w", "Set image width <size>. Default 1920", 1 },
{ CommandHeight, "-height", "h", "Set image height <size>. Default 1080", 1 },
{ CommandBitrate, "-bitrate", "b", "Set bitrate. Use bits per second (e.g. 10MBits/s would be -b 10000000)", 1 },
{ CommandOutput, "-output", "o", "Output filename <filename> (to write to stdout, use '-o -')", 1 },
{ CommandVerbose, "-verbose", "v", "Output verbose information during run", 0 },
{ CommandTimeout, "-timeout", "t", "Time (in ms) to capture for. If not specified, set to 5s. Zero to disable", 1 },
{ CommandDemoMode, "-demo", "d", "Run a demo mode (cycle through range of camera options, no capture)", 1},
{ CommandFramerate, "-framerate", "fps","Specify the frames per second to record", 1},
{ CommandPreviewEnc, "-penc", "e", "Display preview image *after* encoding (shows compression artifacts)", 0},
{ CommandIntraPeriod, "-intra", "g", "Specify the intra refresh period (key frame rate/GoP size). Zero to produce an initial I-frame and then just P-frames.", 1},
{ CommandProfile, "-profile", "pf", "Specify H264 profile to use for encoding", 1},
{ CommandTimed, "-timed", "td", "Cycle between capture and pause. -cycle on,off where on is record time and off is pause time in ms", 0},
{ CommandSignal, "-signal", "s", "Cycle between capture and pause on Signal", 0},
{ CommandKeypress, "-keypress", "k", "Cycle between capture and pause on ENTER", 0},
{ CommandInitialState, "-initial", "i", "Initial state. Use 'record' or 'pause'. Default 'record'", 1},
{ CommandQP, "-qp", "qp", "Quantisation parameter. Use approximately 10-40. Default 0 (off)", 1},
{ CommandInlineHeaders, "-inline", "ih", "Insert inline headers (SPS, PPS) to stream", 0},
{ CommandSegmentFile, "-segment", "sg", "Segment output file into multiple files at specified interval <ms>", 1},
{ CommandSegmentWrap, "-wrap", "wr", "In segment mode, wrap any numbered filename back to 1 when it reaches this number", 1},
{ CommandSegmentStart, "-start", "sn", "In segment mode, start with specified segment number", 1},
{ CommandSplitWait, "-split", "sp", "In wait mode, create new output file for each start event", 0},
{ CommandCircular, "-circular", "c", "Run encoded data through circular buffer until triggered then save", 0},
{ CommandIMV, "-vectors", "x", "Output filename <filename> for inline motion vectors", 1 },
{ CommandCamSelect, "-camselect", "cs", "Select camera <number>. Default 0", 1 },
{ CommandSettings, "-settings", "set","Retrieve camera settings and write to stdout", 0},
{ CommandSensorMode, "-mode", "md", "Force sensor mode. 0=auto. See docs for other modes available", 1},
{ CommandIntraRefreshType,"-irefresh", "if", "Set intra refresh type", 1},
{ CommandFlush, "-flush", "fl", "Flush buffers in order to decrease latency", 0 },
{ CommandSavePTS, "-save-pts", "pts","Save Timestamps to file for mkvmerge", 1 },
{ CommandCodec, "-codec", "cd", "Specify the codec to use - H264 (default) or MJPEG", 1 },
{ CommandLevel, "-level", "lev","Specify H264 level to use for encoding", 1},
{ CommandRaw, "-raw", "r", "Output filename <filename> for raw video", 1 },
{ CommandRawFormat, "-raw-format", "rf", "Specify output format for raw video. Default is yuv", 1},
{ CommandSaveClockRealtime, "-save-clockrt", "crt","Save CLOCK_REALTIME Timestamps to file", 1 },
{ CommandSaveDateTimeLinux, "-save-dtlinux", "dtl","Save datetime linux Timestamps to file", 1 },
};
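/*
 * Example invocation (illustrative filenames; assumes the modified binary is
 * still built/installed as raspivid). Note that the per-frame timestamp block
 * in encoder_buffer_callback() only runs when all three of --save-pts,
 * --save-clockrt and --save-dtlinux are supplied:
 *
 *   raspivid -t 10000 -o video.h264 --save-pts pts.txt \
 *            --save-clockrt clockrt.txt --save-dtlinux dtlinux.txt
 *
 * pts.txt then holds millisecond offsets from the first frame, suitable for
 * mkvmerge's external timestamp file option as noted in the help text above.
 */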
static int cmdline_commands_size = sizeof(cmdline_commands) / sizeof(cmdline_commands[0]);
static struct
{
char *description;
int nextWaitMethod;
} wait_method_description[] =
{
{"Simple capture", WAIT_METHOD_NONE},
{"Capture forever", WAIT_METHOD_FOREVER},
{"Cycle on time", WAIT_METHOD_TIMED},
{"Cycle on keypress", WAIT_METHOD_KEYPRESS},
{"Cycle on signal", WAIT_METHOD_SIGNAL},
};
static int wait_method_description_size = sizeof(wait_method_description) / sizeof(wait_method_description[0]);
/**
* Assign a default set of parameters to the state passed in
*
* @param state Pointer to state structure to assign defaults to
*/
static void default_status(RASPIVID_STATE *state)
{
if (!state)
{
vcos_assert(0);
return;
}
// Default everything to zero
memset(state, 0, sizeof(RASPIVID_STATE));
// Now set anything non-zero
state->timeout = 5000; // 5s delay before take image
state->width = 1920; // Default to 1080p
state->height = 1080;
state->encoding = MMAL_ENCODING_H264;
state->bitrate = 17000000; // This is a decent default bitrate for 1080p
state->framerate = VIDEO_FRAME_RATE_NUM;
state->intraperiod = -1; // Not set
state->quantisationParameter = 0;
state->demoMode = 0;
state->demoInterval = 250; // ms
state->immutableInput = 1;
state->profile = MMAL_VIDEO_PROFILE_H264_HIGH;
state->level = MMAL_VIDEO_LEVEL_H264_4;
state->waitMethod = WAIT_METHOD_NONE;
state->onTime = 5000;
state->offTime = 5000;
state->bCapturing = 0;
state->bInlineHeaders = 0;
state->segmentSize = 0; // 0 = not segmenting the file.
state->segmentNumber = 1;
state->segmentWrap = 0; // Point at which to wrap segment number back to 1. 0 = no wrap
state->splitNow = 0;
state->splitWait = 0;
state->inlineMotionVectors = 0;
state->cameraNum = 0;
state->settings = 0;
state->sensor_mode = 0;
state->intra_refresh_type = -1;
state->frame = 0;
state->save_pts = 0;
state->save_clock_realtime=0;
state->save_datetime_linux=0;
// Setup preview window defaults
raspipreview_set_defaults(&state->preview_parameters);
// Set up the camera_parameters to default
raspicamcontrol_set_defaults(&state->camera_parameters);
}
/**
* Dump image state parameters to stderr.
*
* @param state Pointer to state structure to assign defaults to
*/
static void dump_status(RASPIVID_STATE *state)
{
int i;
if (!state)
{
vcos_assert(0);
return;
}
fprintf(stderr, "Width %d, Height %d, filename %s\n", state->width, state->height, state->filename);
fprintf(stderr, "bitrate %d, framerate %d, time delay %d\n", state->bitrate, state->framerate, state->timeout);
fprintf(stderr, "H264 Profile %s\n", raspicli_unmap_xref(state->profile, profile_map, profile_map_size));
fprintf(stderr, "H264 Level %s\n", raspicli_unmap_xref(state->level, level_map, level_map_size));
fprintf(stderr, "H264 Quantisation level %d, Inline headers %s\n", state->quantisationParameter, state->bInlineHeaders ? "Yes" : "No");
fprintf(stderr, "H264 Intra refresh type %s, period %d\n", raspicli_unmap_xref(state->intra_refresh_type, intra_refresh_map, intra_refresh_map_size), state->intraperiod);
// Not going to display segment data unless asked for it.
if (state->segmentSize)
fprintf(stderr, "Segment size %d, segment wrap value %d, initial segment number %d\n", state->segmentSize, state->segmentWrap, state->segmentNumber);
if (state->raw_output)
fprintf(stderr, "Raw output enabled, format %s\n", raspicli_unmap_xref(state->raw_output_fmt, raw_output_fmt_map, raw_output_fmt_map_size));
fprintf(stderr, "Wait method : ");
for (i=0;i<wait_method_description_size;i++)
{
if (state->waitMethod == wait_method_description[i].nextWaitMethod)
fprintf(stderr, "%s", wait_method_description[i].description);
}
fprintf(stderr, "\nInitial state '%s'\n", raspicli_unmap_xref(state->bCapturing, initial_map, initial_map_size));
fprintf(stderr, "\n\n");
raspipreview_dump_parameters(&state->preview_parameters);
raspicamcontrol_dump_parameters(&state->camera_parameters);
}
/**
* Parse the incoming command line and put resulting parameters in to the state
*
* @param argc Number of arguments in command line
* @param argv Array of pointers to strings from command line
* @param state Pointer to state structure to assign any discovered parameters to
* @return Non-0 if failed for some reason, 0 otherwise
*/
static int parse_cmdline(int argc, const char **argv, RASPIVID_STATE *state)
{
// Parse the command line arguments.
// We are looking for --<something> or -<abbreviation of something>
int valid = 1;
int i;
for (i = 1; i < argc && valid; i++)
{
int command_id, num_parameters;
if (!argv[i])
continue;
if (argv[i][0] != '-')
{
valid = 0;
continue;
}
// Assume parameter is valid until proven otherwise
valid = 1;
command_id = raspicli_get_command_id(cmdline_commands, cmdline_commands_size, &argv[i][1], &num_parameters);
// If we found a command but are missing a parameter, continue (and we will drop out of the loop)
if (command_id != -1 && num_parameters > 0 && (i + 1 >= argc) )
continue;
// We are now dealing with a command line option
switch (command_id)
{
case CommandHelp:
display_valid_parameters(basename(argv[0]));
return -1;
case CommandWidth: // Width > 0
if (sscanf(argv[i + 1], "%u", &state->width) != 1)
valid = 0;
else
i++;
break;
case CommandHeight: // Height > 0
if (sscanf(argv[i + 1], "%u", &state->height) != 1)
valid = 0;
else
i++;
break;
case CommandBitrate: // 1-100
if (sscanf(argv[i + 1], "%u", &state->bitrate) == 1)
{
i++;
}
else
valid = 0;
break;
case CommandOutput: // output filename
{
int len = strlen(argv[i + 1]);
if (len)
{
state->filename = malloc(len + 1);
vcos_assert(state->filename);
if (state->filename)
strncpy(state->filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandVerbose: // display lots of data during run
state->verbose = 1;
break;
case CommandTimeout: // Time to run viewfinder/capture
{
if (sscanf(argv[i + 1], "%u", &state->timeout) == 1)
{
// Ensure we don't overwrite a previously selected waitMethod
if (state->timeout == 0 && state->waitMethod == WAIT_METHOD_NONE)
state->waitMethod = WAIT_METHOD_FOREVER;
i++;
}
else
valid = 0;
break;
}
case CommandDemoMode: // Run in demo mode - no capture
{
// Demo mode might have a timing parameter
// so check if a) we have another parameter, b) it's not the start of the next option
if (i + 1 < argc && argv[i+1][0] != '-')
{
if (sscanf(argv[i + 1], "%u", &state->demoInterval) == 1)
{
// TODO : What limits do we need for timeout?
if (state->demoInterval == 0)
state->demoInterval = 250; // ms
state->demoMode = 1;
i++;
}
else
valid = 0;
}
else
{
state->demoMode = 1;
}
break;
}
case CommandFramerate: // fps to record
{
if (sscanf(argv[i + 1], "%u", &state->framerate) == 1)
{
// TODO : What limits do we need for fps 1 - 30 - 120??
i++;
}
else
valid = 0;
break;
}
case CommandPreviewEnc:
state->immutableInput = 0;
break;
case CommandIntraPeriod: // key frame rate
{
if (sscanf(argv[i + 1], "%u", &state->intraperiod) == 1)
i++;
else
valid = 0;
break;
}
case CommandQP: // quantisation parameter
{
if (sscanf(argv[i + 1], "%u", &state->quantisationParameter) == 1)
i++;
else
valid = 0;
break;
}
case CommandProfile: // H264 profile
{
state->profile = raspicli_map_xref(argv[i + 1], profile_map, profile_map_size);
if( state->profile == -1)
state->profile = MMAL_VIDEO_PROFILE_H264_HIGH;
i++;
break;
}
case CommandInlineHeaders: // H264 inline headers
{
state->bInlineHeaders = 1;
break;
}
case CommandTimed:
{
if (sscanf(argv[i + 1], "%u,%u", &state->onTime, &state->offTime) == 2)
{
i++;
if (state->onTime < 1000)
state->onTime = 1000;
if (state->offTime < 1000)
state->offTime = 1000;
state->waitMethod = WAIT_METHOD_TIMED;
}
else
valid = 0;
break;
}
case CommandKeypress:
state->waitMethod = WAIT_METHOD_KEYPRESS;
break;
case CommandSignal:
state->waitMethod = WAIT_METHOD_SIGNAL;
// Reenable the signal
signal(SIGUSR1, signal_handler);
break;
case CommandInitialState:
{
state->bCapturing = raspicli_map_xref(argv[i + 1], initial_map, initial_map_size);
if( state->bCapturing == -1)
state->bCapturing = 0;
i++;
break;
}
case CommandSegmentFile: // Segment file in to chunks of specified time
{
if (sscanf(argv[i + 1], "%u", &state->segmentSize) == 1)
{
// Must enable inline headers for this to work
state->bInlineHeaders = 1;
i++;
}
else
valid = 0;
break;
}
case CommandSegmentWrap: // segment wrap value
{
if (sscanf(argv[i + 1], "%u", &state->segmentWrap) == 1)
i++;
else
valid = 0;
break;
}
case CommandSegmentStart: // initial segment number
{
if((sscanf(argv[i + 1], "%u", &state->segmentNumber) == 1) && (!state->segmentWrap || (state->segmentNumber <= state->segmentWrap)))
i++;
else
valid = 0;
break;
}
case CommandSplitWait: // split files on restart
{
// Must enable inline headers for this to work
state->bInlineHeaders = 1;
state->splitWait = 1;
break;
}
case CommandCircular:
{
state->bCircularBuffer = 1;
break;
}
case CommandIMV: // output filename
{
state->inlineMotionVectors = 1;
int len = strlen(argv[i + 1]);
if (len)
{
state->imv_filename = malloc(len + 1);
vcos_assert(state->imv_filename);
if (state->imv_filename)
strncpy(state->imv_filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandCamSelect: //Select camera input port
{
if (sscanf(argv[i + 1], "%u", &state->cameraNum) == 1)
{
i++;
}
else
valid = 0;
break;
}
case CommandSettings:
state->settings = 1;
break;
case CommandSensorMode:
{
if (sscanf(argv[i + 1], "%u", &state->sensor_mode) == 1)
{
i++;
}
else
valid = 0;
break;
}
case CommandIntraRefreshType:
{
state->intra_refresh_type = raspicli_map_xref(argv[i + 1], intra_refresh_map, intra_refresh_map_size);
i++;
break;
}
case CommandFlush:
{
state->callback_data.flush_buffers = 1;
break;
}
case CommandSavePTS: // output filename
{
state->save_pts = 1;
int len = strlen(argv[i + 1]);
if (len)
{
state->pts_filename = malloc(len + 1);
vcos_assert(state->pts_filename);
if (state->pts_filename)
strncpy(state->pts_filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandSaveClockRealtime: // output filename
{
state->save_clock_realtime = 1;
int len = strlen(argv[i + 1]);
if (len)
{
state->clock_realtime_filename = malloc(len + 1);
if (state->clock_realtime_filename)
strncpy(state->clock_realtime_filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandSaveDateTimeLinux: // output filename
{
state->save_datetime_linux = 1;
int len = strlen(argv[i + 1]);
if (len)
{
state->datetime_linux_filename = malloc(len + 1);
if (state->datetime_linux_filename)
strncpy(state->datetime_linux_filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandCodec: // codec type
{
int len = strlen(argv[i + 1]);
if (len)
{
if (len==4 && !strncmp("H264", argv[i+1], 4))
state->encoding = MMAL_ENCODING_H264;
else if (len==5 && !strncmp("MJPEG", argv[i+1], 5))
state->encoding = MMAL_ENCODING_MJPEG;
else
valid = 0;
i++;
}
else
valid = 0;
break;
}
case CommandLevel: // H264 level
{
state->level = raspicli_map_xref(argv[i + 1], level_map, level_map_size);
if( state->level == -1)
state->level = MMAL_VIDEO_LEVEL_H264_4;
i++;
break;
}
case CommandRaw: // output filename
{
state->raw_output = 1;
state->raw_output_fmt = RAW_OUTPUT_FMT_YUV;
int len = strlen(argv[i + 1]);
if (len)
{
state->raw_filename = malloc(len + 1);
vcos_assert(state->raw_filename);
if (state->raw_filename)
strncpy(state->raw_filename, argv[i + 1], len+1);
i++;
}
else
valid = 0;
break;
}
case CommandRawFormat:
{
state->raw_output_fmt = raspicli_map_xref(argv[i + 1], raw_output_fmt_map, raw_output_fmt_map_size);
if (state->raw_output_fmt == -1)
valid = 0;
i++;
break;
}
default:
{
// Try parsing for any image specific parameters
// result indicates how many parameters were used up, 0,1,2
// but we adjust by -1 as we have used one already
const char *second_arg = (i + 1 < argc) ? argv[i + 1] : NULL;
int parms_used = (raspicamcontrol_parse_cmdline(&state->camera_parameters, &argv[i][1], second_arg));
// Still unused, try preview options
if (!parms_used)
parms_used = raspipreview_parse_cmdline(&state->preview_parameters, &argv[i][1], second_arg);
// If no parms were used, this must be a bad parameter
if (!parms_used)
valid = 0;
else
i += parms_used - 1;
break;
}
}
}
if (!valid)
{
fprintf(stderr, "Invalid command line option (%s)\n", argv[i-1]);
return 1;
}
// Always disable verbose if output is going to stdout
if (state->filename && state->filename[0] == '-')
{
state->verbose = 0;
}
return 0;
}
/**
* Display usage information for the application to stdout
*
* @param app_name String to display as the application name
*/
static void display_valid_parameters(char *app_name)
{
int i;
fprintf(stdout, "Display camera output to display, and optionally saves an H264 capture at requested bitrate\n\n");
fprintf(stdout, "\nusage: %s [options]\n\n", app_name);
fprintf(stdout, "Image parameter commands\n\n");
raspicli_display_help(cmdline_commands, cmdline_commands_size);
// Profile options
fprintf(stdout, "\n\nH264 Profile options :\n%s", profile_map[0].mode );
for (i=1;i<profile_map_size;i++)
{
fprintf(stdout, ",%s", profile_map[i].mode);
}
// Level options
fprintf(stdout, "\n\nH264 Level options :\n%s", level_map[0].mode );
for (i=1;i<level_map_size;i++)
{
fprintf(stdout, ",%s", level_map[i].mode);
}
// Intra refresh options
fprintf(stdout, "\n\nH264 Intra refresh options :\n%s", intra_refresh_map[0].mode );
for (i=1;i<intra_refresh_map_size;i++)
{
fprintf(stdout, ",%s", intra_refresh_map[i].mode);
}
// Raw output format options
fprintf(stdout, "\n\nRaw output format options :\n%s", raw_output_fmt_map[0].mode );
for (i=1;i<raw_output_fmt_map_size;i++)
{
fprintf(stdout, ",%s", raw_output_fmt_map[i].mode);
}
fprintf(stdout, "\n");
// Help for preview options
raspipreview_display_help();
// Now display any help information from the camcontrol code
raspicamcontrol_display_help();
fprintf(stdout, "\n");
return;
}
/**
* buffer header callback function for camera control
*
* Callback will dump buffer data to the specific file
*
* @param port Pointer to port from which callback originated
* @param buffer mmal buffer header pointer
*/
static void camera_control_callback(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer)
{
if (buffer->cmd == MMAL_EVENT_PARAMETER_CHANGED)
{
MMAL_EVENT_PARAMETER_CHANGED_T *param = (MMAL_EVENT_PARAMETER_CHANGED_T *)buffer->data;
switch (param->hdr.id) {
case MMAL_PARAMETER_CAMERA_SETTINGS:
{
MMAL_PARAMETER_CAMERA_SETTINGS_T *settings = (MMAL_PARAMETER_CAMERA_SETTINGS_T*)param;
vcos_log_error("Exposure now %u, analog gain %u/%u, digital gain %u/%u",
settings->exposure,
settings->analog_gain.num, settings->analog_gain.den,
settings->digital_gain.num, settings->digital_gain.den);
vcos_log_error("AWB R=%u/%u, B=%u/%u",
settings->awb_red_gain.num, settings->awb_red_gain.den,
settings->awb_blue_gain.num, settings->awb_blue_gain.den
);
}
break;
}
}
else if (buffer->cmd == MMAL_EVENT_ERROR)
{
vcos_log_error("No data received from sensor. Check all connections, including the Sunny one on the camera board");
}
else
{
vcos_log_error("Received unexpected camera control callback event, 0x%08x", buffer->cmd);
}
mmal_buffer_header_release(buffer);
}
/**
* Open an output file (or network socket) based on the settings in state
*
* @param pState Pointer to state
* @param filename Output filename; tcp://host:port and udp://host:port are
*        treated as network destinations, and in segment/split modes the
*        current segment number is expanded into a printf-style pattern
*/
static FILE *open_filename(RASPIVID_STATE *pState, char *filename)
{
FILE *new_handle = NULL;
char *tempname = NULL;
if (pState->segmentSize || pState->splitWait)
{
// Create a new filename string
asprintf(&tempname, filename, pState->segmentNumber);
filename = tempname;
}
if (filename)
{
int network = 0, socktype;
if(!strncmp("tcp://", filename, 6))
{
network = 1;
socktype = SOCK_STREAM;
}
if(!strncmp("udp://", filename, 6))
{
network = 1;
socktype = SOCK_DGRAM;
}
if(network)
{
filename += 6;
char *colon;
colon = strchr(filename, ':');
int sfd = socket(AF_INET, socktype, 0);
if(sfd < 0)
{
perror("socket");
}
unsigned short port;
sscanf(colon + 1, "%hu", &port);
*colon = 0;
fprintf(stderr, "Connecting to %s:%hu\n", filename, port);
struct sockaddr_in saddr;
saddr.sin_family = AF_INET;
saddr.sin_port = htons(port);
inet_aton(filename, &saddr.sin_addr);
if(connect(sfd, (struct sockaddr *)&saddr, sizeof(struct sockaddr_in)) < 0)
{
perror("connect");
}
new_handle = fdopen(sfd, "w");
}
else
{
new_handle = fopen(filename, "wb");
}
}
if (pState->verbose)
{
if (new_handle)
fprintf(stderr, "Opening output file \"%s\"\n", filename);
else
fprintf(stderr, "Failed to open new file \"%s\"\n", filename);
}
if (tempname)
free(tempname);
return new_handle;
}
/**
* buffer header callback function for encoder
*
* Callback will dump buffer data to the specific file
*
* @param port Pointer to port from which callback originated
* @param buffer mmal buffer header pointer
*/
static void encoder_buffer_callback(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer)
{
MMAL_BUFFER_HEADER_T *new_buffer;
static int64_t base_time = -1;
static int64_t last_second = -1;
// All our segment times are based on the receipt of the first encoder callback
if (base_time == -1)
base_time = vcos_getmicrosecs64()/1000;
// We pass our file handle and other stuff in via the userdata field.
PORT_USERDATA *pData = (PORT_USERDATA *)port->userdata;
if (pData)
{
int bytes_written = buffer->length;
int64_t current_time = vcos_getmicrosecs64()/1000;
vcos_assert(pData->file_handle);
if(pData->pstate->inlineMotionVectors) vcos_assert(pData->imv_file_handle);
if (pData->cb_buff)
{
int space_in_buff = pData->cb_len - pData->cb_wptr;
int copy_to_end = space_in_buff > buffer->length ? buffer->length : space_in_buff;
int copy_to_start = buffer->length - copy_to_end;
if(buffer->flags & MMAL_BUFFER_HEADER_FLAG_CONFIG)
{
if(pData->header_wptr + buffer->length > sizeof(pData->header_bytes))
{
vcos_log_error("Error in header bytes\n");
}
else
{
// These are the header bytes, save them for final output
mmal_buffer_header_mem_lock(buffer);
memcpy(pData->header_bytes + pData->header_wptr, buffer->data, buffer->length);
mmal_buffer_header_mem_unlock(buffer);
pData->header_wptr += buffer->length;
}
}
else if((buffer->flags & MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO))
{
// Do something with the inline motion vectors...
}
else
{
static int frame_start = -1;
int i;
if(frame_start == -1)
frame_start = pData->cb_wptr;
if(buffer->flags & MMAL_BUFFER_HEADER_FLAG_KEYFRAME)
{
pData->iframe_buff[pData->iframe_buff_wpos] = frame_start;
pData->iframe_buff_wpos = (pData->iframe_buff_wpos + 1) % IFRAME_BUFSIZE;
}
if(buffer->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END)
frame_start = -1;
// If we overtake the iframe rptr then move the rptr along
if((pData->iframe_buff_rpos + 1) % IFRAME_BUFSIZE != pData->iframe_buff_wpos)
{
while(
(
pData->cb_wptr <= pData->iframe_buff[pData->iframe_buff_rpos] &&
(pData->cb_wptr + buffer->length) > pData->iframe_buff[pData->iframe_buff_rpos]
) ||
(
(pData->cb_wptr > pData->iframe_buff[pData->iframe_buff_rpos]) &&
(pData->cb_wptr + buffer->length) > (pData->iframe_buff[pData->iframe_buff_rpos] + pData->cb_len)
)
)
pData->iframe_buff_rpos = (pData->iframe_buff_rpos + 1) % IFRAME_BUFSIZE;
}
mmal_buffer_header_mem_lock(buffer);
// We are pushing data into a circular buffer
memcpy(pData->cb_buff + pData->cb_wptr, buffer->data, copy_to_end);
memcpy(pData->cb_buff, buffer->data + copy_to_end, copy_to_start);
mmal_buffer_header_mem_unlock(buffer);
if((pData->cb_wptr + buffer->length) > pData->cb_len)
pData->cb_wrap = 1;
pData->cb_wptr = (pData->cb_wptr + buffer->length) % pData->cb_len;
for(i = pData->iframe_buff_rpos; i != pData->iframe_buff_wpos; i = (i + 1) % IFRAME_BUFSIZE)
{
int p = pData->iframe_buff[i];
if(pData->cb_buff[p] != 0 || pData->cb_buff[p+1] != 0 || pData->cb_buff[p+2] != 0 || pData->cb_buff[p+3] != 1)
{
vcos_log_error("Error in iframe list\n");
}
}
}
}
else
{
// For segmented record mode, we need to see if we have exceeded our time/size,
// but also since we have inline headers turned on we need to break when we get one to
// ensure that the new stream has the header in it. If we break on an I-frame, the
// SPS/PPS header is actually in the previous chunk.
if ((buffer->flags & MMAL_BUFFER_HEADER_FLAG_CONFIG) &&
((pData->pstate->segmentSize && current_time > base_time + pData->pstate->segmentSize) ||
(pData->pstate->splitWait && pData->pstate->splitNow)))
{
FILE *new_handle;
base_time = current_time;
pData->pstate->splitNow = 0;
pData->pstate->segmentNumber++;
// Only wrap if we have a wrap point set
if (pData->pstate->segmentWrap && pData->pstate->segmentNumber > pData->pstate->segmentWrap)
pData->pstate->segmentNumber = 1;
if (pData->pstate->filename && pData->pstate->filename[0] != '-')
{
new_handle = open_filename(pData->pstate, pData->pstate->filename);
if (new_handle)
{
fclose(pData->file_handle);
pData->file_handle = new_handle;
}
}
if (pData->pstate->imv_filename && pData->pstate->imv_filename[0] != '-')
{
new_handle = open_filename(pData->pstate, pData->pstate->imv_filename);
if (new_handle)
{
fclose(pData->imv_file_handle);
pData->imv_file_handle = new_handle;
}
}
if (pData->pstate->pts_filename && pData->pstate->pts_filename[0] != '-')
{
new_handle = open_filename(pData->pstate, pData->pstate->pts_filename);
if (new_handle)
{
fclose(pData->pts_file_handle);
pData->pts_file_handle = new_handle;
}
}
if (pData->pstate->clock_realtime_filename && pData->pstate->clock_realtime_filename[0] != '-')
{
new_handle = open_filename(pData->pstate, pData->pstate->clock_realtime_filename);
if (new_handle)
{
fclose(pData->clockrt_file_handle);
pData->clockrt_file_handle = new_handle;
}
}
if (pData->pstate->datetime_linux_filename && pData->pstate->datetime_linux_filename[0] != '-')
{
new_handle = open_filename(pData->pstate, pData->pstate->datetime_linux_filename);
if (new_handle)
{
fclose(pData->datetime_file_handle);
pData->datetime_file_handle = new_handle;
}
}
}
if (buffer->length)
{
mmal_buffer_header_mem_lock(buffer);
if(buffer->flags & MMAL_BUFFER_HEADER_FLAG_CODECSIDEINFO)
{
if(pData->pstate->inlineMotionVectors)
{
bytes_written = fwrite(buffer->data, 1, buffer->length, pData->imv_file_handle);
if(pData->flush_buffers) fflush(pData->imv_file_handle);
}
else
{
//We do not want to save inlineMotionVectors...
bytes_written = buffer->length;
}
}
else
{
bytes_written = fwrite(buffer->data, 1, buffer->length, pData->file_handle);
if(pData->flush_buffers) fflush(pData->file_handle);
if(pData->pstate->save_pts && pData->pstate->save_clock_realtime && pData->pstate->save_datetime_linux &&
(buffer->flags & MMAL_BUFFER_HEADER_FLAG_FRAME_END ||
buffer->flags == 0 ||
buffer->flags & MMAL_BUFFER_HEADER_FLAG_KEYFRAME) &&
!(buffer->flags & MMAL_BUFFER_HEADER_FLAG_CONFIG))
{
if(buffer->pts != MMAL_TIME_UNKNOWN && buffer->pts != pData->pstate->lasttime)
{
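/*
 * Per-frame timestamp logging added by this gist: for each completed
 * (non-config) frame with a fresh PTS, write one line to each of three files:
 * the encoder PTS in ms relative to the first frame (for mkvmerge),
 * CLOCK_REALTIME in seconds as a double, and gettimeofday() in microseconds
 * since the Unix epoch.
 */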
int64_t pts;
struct timespec clockrt;
clock_gettime(CLOCK_REALTIME, &clockrt);
double clockTime = clockrt.tv_sec + clockrt.tv_nsec/1E9;
struct timeval datetimelinux;
gettimeofday(&datetimelinux, NULL);
unsigned long long currentTime = (unsigned long long)datetimelinux.tv_sec*1000000ULL + datetimelinux.tv_usec;
if(pData->pstate->frame==0) pData->pstate->starttime=buffer->pts;
pData->pstate->lasttime=buffer->pts;
pts = buffer->pts - pData->pstate->starttime;
fprintf(pData->pts_file_handle,"%lld.%03lld\n", pts/1000, pts%1000);
fprintf(pData->clockrt_file_handle,"%f\n", clockTime);
fprintf(pData->datetime_file_handle,"%llu\n", currentTime);
pData->pstate->frame++;
}
}
}
mmal_buffer_header_mem_unlock(buffer);
if (bytes_written != buffer->length)
{
vcos_log_error("Failed to write buffer data (%d from %d)- aborting", bytes_written, buffer->length);
pData->abort = 1;
}
}
}
// See if the second count has changed and we need to update any annotation
if (current_time/1000 != last_second)
{
//update_annotation_data(pData->pstate);
last_second = current_time/1000;
}
}
else
{
vcos_log_error("Received a encoder buffer callback with no state");
}
// release buffer back to the pool
mmal_buffer_header_release(buffer);
// and send one back to the port (if still open)
if (port->is_enabled)
{
MMAL_STATUS_T status;
new_buffer = mmal_queue_get(pData->pstate->encoder_pool->queue);
if (new_buffer)
status = mmal_port_send_buffer(port, new_buffer);
if (!new_buffer || status != MMAL_SUCCESS)
vcos_log_error("Unable to return a buffer to the encoder port");
}
}
/**
* buffer header callback function for splitter
*
* Callback will dump buffer data to the specific file
*
* @param port Pointer to port from which callback originated
* @param buffer mmal buffer header pointer
*/
static void splitter_buffer_callback(MMAL_PORT_T *port, MMAL_BUFFER_HEADER_T *buffer)
{
MMAL_BUFFER_HEADER_T *new_buffer;
PORT_USERDATA *pData = (PORT_USERDATA *)port->userdata;
if (pData)
{
int bytes_written = 0;
int bytes_to_write = buffer->length;
/* Write only luma component to get grayscale image: */
if (buffer->length && pData->pstate->raw_output_fmt == RAW_OUTPUT_FMT_GRAY)
bytes_to_write = port->format->es->video.width * port->format->es->video.height;
vcos_assert(pData->raw_file_handle);
if (bytes_to_write)
{
mmal_buffer_header_mem_lock(buffer);
bytes_written = fwrite(buffer->data, 1, bytes_to_write, pData->raw_file_handle);
mmal_buffer_header_mem_unlock(buffer);
if (bytes_written != bytes_to_write)
{
vcos_log_error("Failed to write raw buffer data (%d from %d)- aborting", bytes_written, bytes_to_write);
pData->abort = 1;
}
}
}
else
{
vcos_log_error("Received a camera buffer callback with no state");
}
// release buffer back to the pool
mmal_buffer_header_release(buffer);
// and send one back to the port (if still open)
if (port->is_enabled)
{
MMAL_STATUS_T status;
new_buffer = mmal_queue_get(pData->pstate->splitter_pool->queue);
if (new_buffer)
status = mmal_port_send_buffer(port, new_buffer);
if (!new_buffer || status != MMAL_SUCCESS)
vcos_log_error("Unable to return a buffer to the splitter port");
}
}
/**
* Create the camera component, set up its ports
*
* @param state Pointer to state control struct
*
* @return MMAL_SUCCESS if all OK, something else otherwise
*
*/
static MMAL_STATUS_T create_camera_component(RASPIVID_STATE *state)
{
MMAL_COMPONENT_T *camera = 0;
MMAL_ES_FORMAT_T *format;
MMAL_PORT_T *preview_port = NULL, *video_port = NULL, *still_port = NULL;
MMAL_STATUS_T status;
/* Create the component */
status = mmal_component_create(MMAL_COMPONENT_DEFAULT_CAMERA, &camera);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Failed to create camera component");
goto error;
}
MMAL_PARAMETER_INT32_T camera_num =
{{MMAL_PARAMETER_CAMERA_NUM, sizeof(camera_num)}, state->cameraNum};
status = mmal_port_parameter_set(camera->control, &camera_num.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Could not select camera : error %d", status);
goto error;
}
if (!camera->output_num)
{
status = MMAL_ENOSYS;
vcos_log_error("Camera doesn't have output ports");
goto error;
}
status = mmal_port_parameter_set_uint32(camera->control, MMAL_PARAMETER_CAMERA_CUSTOM_SENSOR_CONFIG, state->sensor_mode);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Could not set sensor mode : error %d", status);
goto error;
}
preview_port = camera->output[MMAL_CAMERA_PREVIEW_PORT];
video_port = camera->output[MMAL_CAMERA_VIDEO_PORT];
still_port = camera->output[MMAL_CAMERA_CAPTURE_PORT];
if (state->settings)
{
MMAL_PARAMETER_CHANGE_EVENT_REQUEST_T change_event_request =
{{MMAL_PARAMETER_CHANGE_EVENT_REQUEST, sizeof(MMAL_PARAMETER_CHANGE_EVENT_REQUEST_T)},
MMAL_PARAMETER_CAMERA_SETTINGS, 1};
status = mmal_port_parameter_set(camera->control, &change_event_request.hdr);
if ( status != MMAL_SUCCESS )
{
vcos_log_error("No camera settings events");
}
}
// Enable the camera, and tell it its control callback function
status = mmal_port_enable(camera->control, camera_control_callback);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to enable control port : error %d", status);
goto error;
}
// set up the camera configuration
{
MMAL_PARAMETER_CAMERA_CONFIG_T cam_config =
{
{ MMAL_PARAMETER_CAMERA_CONFIG, sizeof(cam_config) },
.max_stills_w = state->width,
.max_stills_h = state->height,
.stills_yuv422 = 0,
.one_shot_stills = 0,
.max_preview_video_w = state->width,
.max_preview_video_h = state->height,
.num_preview_video_frames = 3 + vcos_max(0, (state->framerate-30)/10),
.stills_capture_circular_buffer_height = 0,
.fast_preview_resume = 0,
.use_stc_timestamp = MMAL_PARAM_TIMESTAMP_MODE_RAW_STC
};
mmal_port_parameter_set(camera->control, &cam_config.hdr);
}
// Now set up the port formats
// Set the encode format on the Preview port
// HW limitations mean we need the preview to be the same size as the required recorded output
format = preview_port->format;
format->encoding = MMAL_ENCODING_OPAQUE;
format->encoding_variant = MMAL_ENCODING_I420;
if(state->camera_parameters.shutter_speed > 6000000)
{
MMAL_PARAMETER_FPS_RANGE_T fps_range = {{MMAL_PARAMETER_FPS_RANGE, sizeof(fps_range)},
{ 50, 1000 }, {166, 1000}};
mmal_port_parameter_set(preview_port, &fps_range.hdr);
}
else if(state->camera_parameters.shutter_speed > 1000000)
{
MMAL_PARAMETER_FPS_RANGE_T fps_range = {{MMAL_PARAMETER_FPS_RANGE, sizeof(fps_range)},
{ 166, 1000 }, {999, 1000}};
mmal_port_parameter_set(preview_port, &fps_range.hdr);
}
//enable dynamic framerate if necessary
if (state->camera_parameters.shutter_speed)
{
if (state->framerate > 1000000./state->camera_parameters.shutter_speed)
{
state->framerate=0;
if (state->verbose)
fprintf(stderr, "Enable dynamic frame rate to fulfil shutter speed requirement\n");
}
}
format->encoding = MMAL_ENCODING_OPAQUE;
format->es->video.width = VCOS_ALIGN_UP(state->width, 32);
format->es->video.height = VCOS_ALIGN_UP(state->height, 16);
format->es->video.crop.x = 0;
format->es->video.crop.y = 0;
format->es->video.crop.width = state->width;
format->es->video.crop.height = state->height;
format->es->video.frame_rate.num = PREVIEW_FRAME_RATE_NUM;
format->es->video.frame_rate.den = PREVIEW_FRAME_RATE_DEN;
status = mmal_port_format_commit(preview_port);
if (status != MMAL_SUCCESS)
{
vcos_log_error("camera viewfinder format couldn't be set");
goto error;
}
// Set the encode format on the video port
format = video_port->format;
format->encoding_variant = MMAL_ENCODING_I420;
if(state->camera_parameters.shutter_speed > 6000000)
{
MMAL_PARAMETER_FPS_RANGE_T fps_range = {{MMAL_PARAMETER_FPS_RANGE, sizeof(fps_range)},
{ 50, 1000 }, {166, 1000}};
mmal_port_parameter_set(video_port, &fps_range.hdr);
}
else if(state->camera_parameters.shutter_speed > 1000000)
{
MMAL_PARAMETER_FPS_RANGE_T fps_range = {{MMAL_PARAMETER_FPS_RANGE, sizeof(fps_range)},
{ 167, 1000 }, {999, 1000}};
mmal_port_parameter_set(video_port, &fps_range.hdr);
}
format->encoding = MMAL_ENCODING_OPAQUE;
format->es->video.width = VCOS_ALIGN_UP(state->width, 32);
format->es->video.height = VCOS_ALIGN_UP(state->height, 16);
format->es->video.crop.x = 0;
format->es->video.crop.y = 0;
format->es->video.crop.width = state->width;
format->es->video.crop.height = state->height;
format->es->video.frame_rate.num = state->framerate;
format->es->video.frame_rate.den = VIDEO_FRAME_RATE_DEN;
status = mmal_port_format_commit(video_port);
if (status != MMAL_SUCCESS)
{
vcos_log_error("camera video format couldn't be set");
goto error;
}
// Ensure there are enough buffers to avoid dropping frames
if (video_port->buffer_num < VIDEO_OUTPUT_BUFFERS_NUM)
video_port->buffer_num = VIDEO_OUTPUT_BUFFERS_NUM;
// Set the encode format on the still port
format = still_port->format;
format->encoding = MMAL_ENCODING_OPAQUE;
format->encoding_variant = MMAL_ENCODING_I420;
format->es->video.width = VCOS_ALIGN_UP(state->width, 32);
format->es->video.height = VCOS_ALIGN_UP(state->height, 16);
format->es->video.crop.x = 0;
format->es->video.crop.y = 0;
format->es->video.crop.width = state->width;
format->es->video.crop.height = state->height;
format->es->video.frame_rate.num = 0;
format->es->video.frame_rate.den = 1;
status = mmal_port_format_commit(still_port);
if (status != MMAL_SUCCESS)
{
vcos_log_error("camera still format couldn't be set");
goto error;
}
/* Ensure there are enough buffers to avoid dropping frames */
if (still_port->buffer_num < VIDEO_OUTPUT_BUFFERS_NUM)
still_port->buffer_num = VIDEO_OUTPUT_BUFFERS_NUM;
/* Enable component */
status = mmal_component_enable(camera);
if (status != MMAL_SUCCESS)
{
vcos_log_error("camera component couldn't be enabled");
goto error;
}
raspicamcontrol_set_all_parameters(camera, &state->camera_parameters);
state->camera_component = camera;
//update_annotation_data(state);
if (state->verbose)
fprintf(stderr, "Camera component done\n");
return status;
error:
if (camera)
mmal_component_destroy(camera);
return status;
}
/**
* Destroy the camera component
*
* @param state Pointer to state control struct
*
*/
static void destroy_camera_component(RASPIVID_STATE *state)
{
if (state->camera_component)
{
mmal_component_destroy(state->camera_component);
state->camera_component = NULL;
}
}
/**
* Create the splitter component, set up its ports
*
* @param state Pointer to state control struct
*
* @return MMAL_SUCCESS if all OK, something else otherwise
*
*/
static MMAL_STATUS_T create_splitter_component(RASPIVID_STATE *state)
{
MMAL_COMPONENT_T *splitter = 0;
MMAL_PORT_T *splitter_output = NULL;
MMAL_ES_FORMAT_T *format;
MMAL_STATUS_T status;
MMAL_POOL_T *pool;
int i;
if (state->camera_component == NULL)
{
status = MMAL_ENOSYS;
vcos_log_error("Camera component must be created before splitter");
goto error;
}
/* Create the component */
status = mmal_component_create(MMAL_COMPONENT_DEFAULT_VIDEO_SPLITTER, &splitter);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Failed to create splitter component");
goto error;
}
if (!splitter->input_num)
{
status = MMAL_ENOSYS;
vcos_log_error("Splitter doesn't have any input port");
goto error;
}
if (splitter->output_num < 2)
{
status = MMAL_ENOSYS;
vcos_log_error("Splitter doesn't have enough output ports");
goto error;
}
/* Ensure there are enough buffers to avoid dropping frames: */
mmal_format_copy(splitter->input[0]->format, state->camera_component->output[MMAL_CAMERA_PREVIEW_PORT]->format);
if (splitter->input[0]->buffer_num < VIDEO_OUTPUT_BUFFERS_NUM)
splitter->input[0]->buffer_num = VIDEO_OUTPUT_BUFFERS_NUM;
status = mmal_port_format_commit(splitter->input[0]);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set format on splitter input port");
goto error;
}
/* Splitter can do format conversions, configure format for its output port: */
for (i = 0; i < splitter->output_num; i++)
{
mmal_format_copy(splitter->output[i]->format, splitter->input[0]->format);
if (i == SPLITTER_OUTPUT_PORT)
{
format = splitter->output[i]->format;
switch (state->raw_output_fmt)
{
case RAW_OUTPUT_FMT_YUV:
case RAW_OUTPUT_FMT_GRAY: /* Grayscale image contains only luma (Y) component */
format->encoding = MMAL_ENCODING_I420;
format->encoding_variant = MMAL_ENCODING_I420;
break;
case RAW_OUTPUT_FMT_RGB:
if (mmal_util_rgb_order_fixed(state->camera_component->output[MMAL_CAMERA_CAPTURE_PORT]))
format->encoding = MMAL_ENCODING_RGB24;
else
format->encoding = MMAL_ENCODING_BGR24;
format->encoding_variant = 0; /* Irrelevant when not in opaque mode */
break;
default:
status = MMAL_EINVAL;
vcos_log_error("unknown raw output format");
goto error;
}
}
status = mmal_port_format_commit(splitter->output[i]);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set format on splitter output port %d", i);
goto error;
}
}
/* Enable component */
status = mmal_component_enable(splitter);
if (status != MMAL_SUCCESS)
{
vcos_log_error("splitter component couldn't be enabled");
goto error;
}
/* Create pool of buffer headers for the output port to consume */
splitter_output = splitter->output[SPLITTER_OUTPUT_PORT];
pool = mmal_port_pool_create(splitter_output, splitter_output->buffer_num, splitter_output->buffer_size);
if (!pool)
{
vcos_log_error("Failed to create buffer header pool for splitter output port %s", splitter_output->name);
}
state->splitter_pool = pool;
state->splitter_component = splitter;
if (state->verbose)
fprintf(stderr, "Splitter component done\n");
return status;
error:
if (splitter)
mmal_component_destroy(splitter);
return status;
}
/**
* Destroy the splitter component
*
* @param state Pointer to state control struct
*
*/
static void destroy_splitter_component(RASPIVID_STATE *state)
{
// Get rid of any port buffers first
if (state->splitter_pool)
{
mmal_port_pool_destroy(state->splitter_component->output[SPLITTER_OUTPUT_PORT], state->splitter_pool);
}
if (state->splitter_component)
{
mmal_component_destroy(state->splitter_component);
state->splitter_component = NULL;
}
}
/**
* Create the encoder component, set up its ports
*
* @param state Pointer to state control struct
*
* @return MMAL_SUCCESS if all OK, something else otherwise
*
*/
static MMAL_STATUS_T create_encoder_component(RASPIVID_STATE *state)
{
MMAL_COMPONENT_T *encoder = 0;
MMAL_PORT_T *encoder_input = NULL, *encoder_output = NULL;
MMAL_STATUS_T status;
MMAL_POOL_T *pool;
status = mmal_component_create(MMAL_COMPONENT_DEFAULT_VIDEO_ENCODER, &encoder);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to create video encoder component");
goto error;
}
if (!encoder->input_num || !encoder->output_num)
{
status = MMAL_ENOSYS;
vcos_log_error("Video encoder doesn't have input/output ports");
goto error;
}
encoder_input = encoder->input[0];
encoder_output = encoder->output[0];
// We want same format on input and output
mmal_format_copy(encoder_output->format, encoder_input->format);
// Set the requested encoding (H264 or MJPEG)
encoder_output->format->encoding = state->encoding;
if(state->encoding == MMAL_ENCODING_H264)
{
if(state->level == MMAL_VIDEO_LEVEL_H264_4)
{
if(state->bitrate > MAX_BITRATE_LEVEL4)
{
fprintf(stderr, "Bitrate too high: Reducing to 25MBit/s\n");
state->bitrate = MAX_BITRATE_LEVEL4;
}
}
else
{
if(state->bitrate > MAX_BITRATE_LEVEL42)
{
fprintf(stderr, "Bitrate too high: Reducing to 62.5MBit/s\n");
state->bitrate = MAX_BITRATE_LEVEL42;
}
}
}
else if(state->encoding == MMAL_ENCODING_MJPEG)
{
if(state->bitrate > MAX_BITRATE_MJPEG)
{
fprintf(stderr, "Bitrate too high: Reducing to 25MBit/s\n");
state->bitrate = MAX_BITRATE_MJPEG;
}
}
encoder_output->format->bitrate = state->bitrate;
if (state->encoding == MMAL_ENCODING_H264)
encoder_output->buffer_size = encoder_output->buffer_size_recommended;
else
encoder_output->buffer_size = 256<<10;
if (encoder_output->buffer_size < encoder_output->buffer_size_min)
encoder_output->buffer_size = encoder_output->buffer_size_min;
encoder_output->buffer_num = encoder_output->buffer_num_recommended;
if (encoder_output->buffer_num < encoder_output->buffer_num_min)
encoder_output->buffer_num = encoder_output->buffer_num_min;
// We need to set the frame rate on output to 0, to ensure it gets
// updated correctly from the input framerate when port connected
encoder_output->format->es->video.frame_rate.num = 0;
encoder_output->format->es->video.frame_rate.den = 1;
// Commit the port changes to the output port
status = mmal_port_format_commit(encoder_output);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set format on video encoder output port");
goto error;
}
// Set the rate control parameter
if (0)
{
MMAL_PARAMETER_VIDEO_RATECONTROL_T param = {{ MMAL_PARAMETER_RATECONTROL, sizeof(param)}, MMAL_VIDEO_RATECONTROL_DEFAULT};
status = mmal_port_parameter_set(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set ratecontrol");
goto error;
}
}
if (state->encoding == MMAL_ENCODING_H264 &&
state->intraperiod != -1)
{
MMAL_PARAMETER_UINT32_T param = {{ MMAL_PARAMETER_INTRAPERIOD, sizeof(param)}, state->intraperiod};
status = mmal_port_parameter_set(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set intraperiod");
goto error;
}
}
if (state->encoding == MMAL_ENCODING_H264 &&
state->quantisationParameter)
{
MMAL_PARAMETER_UINT32_T param = {{ MMAL_PARAMETER_VIDEO_ENCODE_INITIAL_QUANT, sizeof(param)}, state->quantisationParameter};
status = mmal_port_parameter_set(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set initial QP");
goto error;
}
MMAL_PARAMETER_UINT32_T param2 = {{ MMAL_PARAMETER_VIDEO_ENCODE_MIN_QUANT, sizeof(param)}, state->quantisationParameter};
status = mmal_port_parameter_set(encoder_output, &param2.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set min QP");
goto error;
}
MMAL_PARAMETER_UINT32_T param3 = {{ MMAL_PARAMETER_VIDEO_ENCODE_MAX_QUANT, sizeof(param)}, state->quantisationParameter};
status = mmal_port_parameter_set(encoder_output, &param3.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set max QP");
goto error;
}
}
if (state->encoding == MMAL_ENCODING_H264)
{
MMAL_PARAMETER_VIDEO_PROFILE_T param;
param.hdr.id = MMAL_PARAMETER_PROFILE;
param.hdr.size = sizeof(param);
param.profile[0].profile = state->profile;
if((VCOS_ALIGN_UP(state->width,16) >> 4) * (VCOS_ALIGN_UP(state->height,16) >> 4) * state->framerate > 245760)
{
if((VCOS_ALIGN_UP(state->width,16) >> 4) * (VCOS_ALIGN_UP(state->height,16) >> 4) * state->framerate <= 522240)
{
fprintf(stderr, "Too many macroblocks/s: Increasing H264 Level to 4.2\n");
state->level=MMAL_VIDEO_LEVEL_H264_42;
}
else
{
vcos_log_error("Too many macroblocks/s requested");
goto error;
}
}
param.profile[0].level = state->level;
status = mmal_port_parameter_set(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set H264 profile");
goto error;
}
}
if (mmal_port_parameter_set_boolean(encoder_input, MMAL_PARAMETER_VIDEO_IMMUTABLE_INPUT, state->immutableInput) != MMAL_SUCCESS)
{
vcos_log_error("Unable to set immutable input flag");
// Continue rather than abort..
}
//set INLINE HEADER flag to generate SPS and PPS for every IDR if requested
if (mmal_port_parameter_set_boolean(encoder_output, MMAL_PARAMETER_VIDEO_ENCODE_INLINE_HEADER, state->bInlineHeaders) != MMAL_SUCCESS)
{
vcos_log_error("failed to set INLINE HEADER FLAG parameters");
// Continue rather than abort..
}
//set INLINE VECTORS flag to request motion vector estimates
if (state->encoding == MMAL_ENCODING_H264 &&
mmal_port_parameter_set_boolean(encoder_output, MMAL_PARAMETER_VIDEO_ENCODE_INLINE_VECTORS, state->inlineMotionVectors) != MMAL_SUCCESS)
{
vcos_log_error("failed to set INLINE VECTORS parameters");
// Continue rather than abort..
}
// Adaptive intra refresh settings
if (state->encoding == MMAL_ENCODING_H264 &&
state->intra_refresh_type != -1)
{
MMAL_PARAMETER_VIDEO_INTRA_REFRESH_T param;
param.hdr.id = MMAL_PARAMETER_VIDEO_INTRA_REFRESH;
param.hdr.size = sizeof(param);
// Get first so we don't overwrite anything unexpectedly
status = mmal_port_parameter_get(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_warn("Unable to get existing H264 intra-refresh values. Please update your firmware");
// Set some defaults, don't just pass random stack data
param.air_mbs = param.air_ref = param.cir_mbs = param.pir_mbs = 0;
}
param.refresh_mode = state->intra_refresh_type;
//if (state->intra_refresh_type == MMAL_VIDEO_INTRA_REFRESH_CYCLIC_MROWS)
// param.cir_mbs = 10;
status = mmal_port_parameter_set(encoder_output, &param.hdr);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to set H264 intra-refresh values");
goto error;
}
}
// Enable component
status = mmal_component_enable(encoder);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Unable to enable video encoder component");
goto error;
}
/* Create pool of buffer headers for the output port to consume */
pool = mmal_port_pool_create(encoder_output, encoder_output->buffer_num, encoder_output->buffer_size);
if (!pool)
{
vcos_log_error("Failed to create buffer header pool for encoder output port %s", encoder_output->name);
}
state->encoder_pool = pool;
state->encoder_component = encoder;
if (state->verbose)
fprintf(stderr, "Encoder component done\n");
return status;
error:
if (encoder)
mmal_component_destroy(encoder);
state->encoder_component = NULL;
return status;
}
/**
* Destroy the encoder component
*
* @param state Pointer to state control struct
*
*/
static void destroy_encoder_component(RASPIVID_STATE *state)
{
// Get rid of any port buffers first
if (state->encoder_pool)
{
mmal_port_pool_destroy(state->encoder_component->output[0], state->encoder_pool);
}
if (state->encoder_component)
{
mmal_component_destroy(state->encoder_component);
state->encoder_component = NULL;
}
}
/**
* Connect two specific ports together
*
* @param output_port Pointer to the output port
* @param input_port Pointer to the input port
* @param connection Pointer to an MMAL connection pointer, reassigned if the function is successful
* @return Returns a MMAL_STATUS_T giving result of operation
*
*/
static MMAL_STATUS_T connect_ports(MMAL_PORT_T *output_port, MMAL_PORT_T *input_port, MMAL_CONNECTION_T **connection)
{
MMAL_STATUS_T status;
status = mmal_connection_create(connection, output_port, input_port, MMAL_CONNECTION_FLAG_TUNNELLING | MMAL_CONNECTION_FLAG_ALLOCATION_ON_INPUT);
if (status == MMAL_SUCCESS)
{
status = mmal_connection_enable(*connection);
if (status != MMAL_SUCCESS)
mmal_connection_destroy(*connection);
}
return status;
}
/**
* Checks if specified port is valid and enabled, then disables it
*
* @param port Pointer to the port
*
*/
static void check_disable_port(MMAL_PORT_T *port)
{
if (port && port->is_enabled)
mmal_port_disable(port);
}
/**
* Handler for sigint signals
*
* @param signal_number ID of incoming signal.
*
*/
static void signal_handler(int signal_number)
{
if (signal_number == SIGUSR1)
{
// Handle but ignore - prevents us dropping out if started in non-signal mode
// and someone sends us the USR1 signal anyway
}
else
{
// Going to abort on all other signals
vcos_log_error("Aborting program\n");
exit(130);
}
}
/**
* Pause for the specified time, but return early if an abort request is detected
*
* @param state Pointer to state control struct
* @param pause Time in ms to pause
* @return 1 if the abort flag in the state's callback data was set during the pause, 0 otherwise
*
*/
static int pause_and_test_abort(RASPIVID_STATE *state, int pause)
{
int wait;
if (!pause)
return 0;
// Going to check every ABORT_INTERVAL milliseconds
for (wait = 0; wait < pause; wait+= ABORT_INTERVAL)
{
vcos_sleep(ABORT_INTERVAL);
if (state->callback_data.abort)
return 1;
}
return 0;
}
/**
* Function to wait in various ways (depending on settings)
*
* @param state Pointer to the state data
*
* @return Non-zero to continue, 0 if the end of the run has been reached
*/
static int wait_for_next_change(RASPIVID_STATE *state)
{
int keep_running = 1;
static int64_t complete_time = -1;
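// complete_time is static: the absolute deadline is computed once on the first call and reused afterwards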
// Have we actually exceeded our timeout?
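// (vcos_getmicrosecs64() returns microseconds, hence the /1000 to work in milliseconds)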
int64_t current_time = vcos_getmicrosecs64()/1000;
if (complete_time == -1)
complete_time = current_time + state->timeout;
// if we have run out of time, flag we need to exit
if (current_time >= complete_time && state->timeout != 0)
keep_running = 0;
switch (state->waitMethod)
{
case WAIT_METHOD_NONE:
(void)pause_and_test_abort(state, state->timeout);
return 0;
case WAIT_METHOD_FOREVER:
{
// We never return from this. Expect a ctrl-c to exit.
while (1)
// Have a sleep so we don't hog the CPU.
vcos_sleep(10000);
return 0;
}
case WAIT_METHOD_TIMED:
{
int abort;
if (state->bCapturing)
abort = pause_and_test_abort(state, state->onTime);
else
abort = pause_and_test_abort(state, state->offTime);
if (abort)
return 0;
else
return keep_running;
}
case WAIT_METHOD_KEYPRESS:
{
char ch;
if (state->verbose)
fprintf(stderr, "Press Enter to %s, X then ENTER to exit\n", state->bCapturing ? "pause" : "capture");
ch = getchar();
if (ch == 'x' || ch == 'X')
return 0;
else
return keep_running;
}
case WAIT_METHOD_SIGNAL:
{
// Need to wait for a SIGUSR1 signal
sigset_t waitset;
int sig;
int result = 0;
sigemptyset( &waitset );
sigaddset( &waitset, SIGUSR1 );
// We are multi-threaded because we use MMAL, so we need to use the pthread
// variant of sigprocmask (pthread_sigmask) to block SIGUSR1 so we can wait on it.
pthread_sigmask( SIG_BLOCK, &waitset, NULL );
if (state->verbose)
{
fprintf(stderr, "Waiting for SIGUSR1 to %s\n", state->bCapturing ? "pause" : "capture");
}
result = sigwait( &waitset, &sig );
if (state->verbose && result != 0)
fprintf(stderr, "Bad signal received - error %d\n", errno);
return keep_running;
}
} // switch
return keep_running;
}
/**
* main
*/
int main(int argc, const char **argv)
{
// Our main data storage vessel..
RASPIVID_STATE state;
int exit_code = EX_OK;
MMAL_STATUS_T status = MMAL_SUCCESS;
MMAL_PORT_T *camera_preview_port = NULL;
MMAL_PORT_T *camera_video_port = NULL;
MMAL_PORT_T *camera_still_port = NULL;
MMAL_PORT_T *preview_input_port = NULL;
MMAL_PORT_T *encoder_input_port = NULL;
MMAL_PORT_T *encoder_output_port = NULL;
MMAL_PORT_T *splitter_input_port = NULL;
MMAL_PORT_T *splitter_output_port = NULL;
MMAL_PORT_T *splitter_preview_port = NULL;
bcm_host_init();
// Register our application with the logging system
vcos_log_register("RaspiVid", VCOS_LOG_CATEGORY);
signal(SIGINT, signal_handler);
// Disable USR1 for the moment - it may be re-enabled if we go into signal capture mode
signal(SIGUSR1, SIG_IGN);
default_status(&state);
// Do we have any parameters
if (argc == 1)
{
fprintf(stdout, "\n%s Camera App %s\n\n", basename(argv[0]), VERSION_STRING);
display_valid_parameters(basename(argv[0]));
exit(EX_USAGE);
}
// Parse the command line and put options in to our status structure
if (parse_cmdline(argc, argv, &state))
{
status = -1;
exit(EX_USAGE);
}
if (state.verbose)
{
fprintf(stderr, "\n%s Camera App %s\n\n", basename(argv[0]), VERSION_STRING);
dump_status(&state);
}
// OK, we have a nice set of parameters. Now set up our components
// We have three components. Camera, Preview and encoder.
if ((status = create_camera_component(&state)) != MMAL_SUCCESS)
{
vcos_log_error("%s: Failed to create camera component", __func__);
exit_code = EX_SOFTWARE;
}
else if ((status = raspipreview_create(&state.preview_parameters)) != MMAL_SUCCESS)
{
vcos_log_error("%s: Failed to create preview component", __func__);
destroy_camera_component(&state);
exit_code = EX_SOFTWARE;
}
else if ((status = create_encoder_component(&state)) != MMAL_SUCCESS)
{
vcos_log_error("%s: Failed to create encode component", __func__);
raspipreview_destroy(&state.preview_parameters);
destroy_camera_component(&state);
exit_code = EX_SOFTWARE;
}
else if (state.raw_output && (status = create_splitter_component(&state)) != MMAL_SUCCESS)
{
vcos_log_error("%s: Failed to create splitter component", __func__);
raspipreview_destroy(&state.preview_parameters);
destroy_camera_component(&state);
destroy_encoder_component(&state);
exit_code = EX_SOFTWARE;
}
else
{
if (state.verbose)
fprintf(stderr, "Starting component connection stage\n");
camera_preview_port = state.camera_component->output[MMAL_CAMERA_PREVIEW_PORT];
camera_video_port = state.camera_component->output[MMAL_CAMERA_VIDEO_PORT];
camera_still_port = state.camera_component->output[MMAL_CAMERA_CAPTURE_PORT];
preview_input_port = state.preview_parameters.preview_component->input[0];
encoder_input_port = state.encoder_component->input[0];
encoder_output_port = state.encoder_component->output[0];
if (state.raw_output)
{
splitter_input_port = state.splitter_component->input[0];
splitter_output_port = state.splitter_component->output[SPLITTER_OUTPUT_PORT];
splitter_preview_port = state.splitter_component->output[SPLITTER_PREVIEW_PORT];
}
if (state.preview_parameters.wantPreview )
{
if (state.raw_output)
{
if (state.verbose)
fprintf(stderr, "Connecting camera preview port to splitter input port\n");
// Connect camera to splitter
status = connect_ports(camera_preview_port, splitter_input_port, &state.splitter_connection);
if (status != MMAL_SUCCESS)
{
state.splitter_connection = NULL;
vcos_log_error("%s: Failed to connect camera preview port to splitter input", __func__);
goto error;
}
if (state.verbose)
{
fprintf(stderr, "Connecting splitter preview port to preview input port\n");
fprintf(stderr, "Starting video preview\n");
}
// Connect splitter to preview
status = connect_ports(splitter_preview_port, preview_input_port, &state.preview_connection);
}
else
{
if (state.verbose)
{
fprintf(stderr, "Connecting camera preview port to preview input port\n");
fprintf(stderr, "Starting video preview\n");
}
// Connect camera to preview
status = connect_ports(camera_preview_port, preview_input_port, &state.preview_connection);
}
if (status != MMAL_SUCCESS)
state.preview_connection = NULL;
}
else
{
if (state.raw_output)
{
if (state.verbose)
fprintf(stderr, "Connecting camera preview port to splitter input port\n");
// Connect camera to splitter
status = connect_ports(camera_preview_port, splitter_input_port, &state.splitter_connection);
if (status != MMAL_SUCCESS)
{
state.splitter_connection = NULL;
vcos_log_error("%s: Failed to connect camera preview port to splitter input", __func__);
goto error;
}
}
else
{
status = MMAL_SUCCESS;
}
}
if (status == MMAL_SUCCESS)
{
if (state.verbose)
fprintf(stderr, "Connecting camera video port to encoder input port\n");
// Now connect the camera to the encoder
status = connect_ports(camera_video_port, encoder_input_port, &state.encoder_connection);
if (status != MMAL_SUCCESS)
{
state.encoder_connection = NULL;
vcos_log_error("%s: Failed to connect camera video port to encoder input", __func__);
goto error;
}
}
if (status == MMAL_SUCCESS)
{
// Set up our userdata - this is passed through to the callback where we need the information.
state.callback_data.pstate = &state;
state.callback_data.abort = 0;
if (state.raw_output)
{
splitter_output_port->userdata = (struct MMAL_PORT_USERDATA_T *)&state.callback_data;
if (state.verbose)
fprintf(stderr, "Enabling splitter output port\n");
// Enable the splitter output port and tell it its callback function
status = mmal_port_enable(splitter_output_port, splitter_buffer_callback);
if (status != MMAL_SUCCESS)
{
vcos_log_error("%s: Failed to setup splitter output port", __func__);
goto error;
}
}
state.callback_data.file_handle = NULL;
if (state.filename)
{
if (state.filename[0] == '-')
{
state.callback_data.file_handle = stdout;
// Ensure we don't upset the output stream with diagnostics/info
state.verbose = 0;
}
else
{
state.callback_data.file_handle = open_filename(&state, state.filename);
}
if (!state.callback_data.file_handle)
{
// Notify user, carry on but discarding encoded output buffers
vcos_log_error("%s: Error opening output file: %s\nNo output file will be generated\n", __func__, state.filename);
}
}
state.callback_data.imv_file_handle = NULL;
if (state.imv_filename)
{
if (state.imv_filename[0] == '-')
{
state.callback_data.imv_file_handle = stdout;
}
else
{
state.callback_data.imv_file_handle = open_filename(&state, state.imv_filename);
}
if (!state.callback_data.imv_file_handle)
{
// Notify user; carry on but disable inline motion vector output
fprintf(stderr, "Error opening output file: %s\nNo output file will be generated\n",state.imv_filename);
state.inlineMotionVectors=0;
}
}
state.callback_data.pts_file_handle = NULL;
if (state.pts_filename)
{
if (state.pts_filename[0] == '-')
{
state.callback_data.pts_file_handle = stdout;
}
else
{
state.callback_data.pts_file_handle = open_filename(&state, state.pts_filename);
if (state.callback_data.pts_file_handle) /* save header for mkvmerge */
fprintf(state.callback_data.pts_file_handle, "# timecode format v2\n");
}
if (!state.callback_data.pts_file_handle)
{
// Notify user; carry on but disable saving of frame timestamps (PTS)
fprintf(stderr, "Error opening output file: %s\nNo output file will be generated\n",state.pts_filename);
state.save_pts=0;
}
}
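// The two blocks below are this gist's additions: open optional per-frame timestamp files,
// one for CLOCK_REALTIME values and one for a gettimeofday()-based date/time, mirroring the
// PTS file handling above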
state.callback_data.clockrt_file_handle = NULL;
if (state.clock_realtime_filename)
{
if (state.clock_realtime_filename[0] == '-')
{
state.callback_data.clockrt_file_handle = stdout;
}
else
{
state.callback_data.clockrt_file_handle = open_filename(&state, state.clock_realtime_filename);
if (state.callback_data.clockrt_file_handle) /* save header for mkvmerge */
fprintf(state.callback_data.clockrt_file_handle, "# CLOCK_REALTIME\n");
}
if (!state.callback_data.clockrt_file_handle)
{
// Notify user; carry on but disable saving of CLOCK_REALTIME timestamps
fprintf(stderr, "Error opening output file: %s\nNo output file will be generated\n",state.clock_realtime_filename);
state.save_clock_realtime=0;
}
}
state.callback_data.datetime_file_handle = NULL;
if (state.datetime_linux_filename)
{
if (state.datetime_linux_filename[0] == '-')
{
state.callback_data.datetime_file_handle = stdout;
}
else
{
state.callback_data.datetime_file_handle = open_filename(&state, state.datetime_linux_filename);
if (state.callback_data.datetime_file_handle) /* save header for mkvmerge */
fprintf(state.callback_data.datetime_file_handle, "# Date Time v2\n");
}
if (!state.callback_data.datetime_file_handle)
{
// Notify user; carry on but disable saving of date/time stamps
fprintf(stderr, "Error opening output file: %s\nNo output file will be generated\n",state.datetime_linux_filename);
state.save_datetime_linux=0;
}
}
state.callback_data.raw_file_handle = NULL;
if (state.raw_filename)
{
if (state.raw_filename[0] == '-')
{
state.callback_data.raw_file_handle = stdout;
}
else
{
state.callback_data.raw_file_handle = open_filename(&state, state.raw_filename);
}
if (!state.callback_data.raw_file_handle)
{
// Notify user; carry on but disable raw output
fprintf(stderr, "Error opening output file: %s\nNo output file will be generated\n", state.raw_filename);
state.raw_output = 0;
}
}
if(state.bCircularBuffer)
{
if(state.bitrate == 0)
{
vcos_log_error("%s: Error circular buffer requires constant bitrate and small intra period\n", __func__);
goto error;
}
else if(state.timeout == 0)
{
vcos_log_error("%s: Error, circular buffer size is based on timeout must be greater than zero\n", __func__);
goto error;
}
else if(state.waitMethod != WAIT_METHOD_KEYPRESS && state.waitMethod != WAIT_METHOD_SIGNAL)
{
vcos_log_error("%s: Error, Circular buffer mode requires either keypress (-k) or signal (-s) triggering\n", __func__);
goto error;
}
else if(!state.callback_data.file_handle)
{
vcos_log_error("%s: Error require output file (or stdout) for Circular buffer mode\n", __func__);
goto error;
}
else
{
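// bitrate is in bits/s and timeout in ms: bits/s * seconds / 8 gives the buffer size in bytes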
int count = state.bitrate * (state.timeout / 1000) / 8;
state.callback_data.cb_buff = (char *) malloc(count);
if(state.callback_data.cb_buff == NULL)
{
vcos_log_error("%s: Unable to allocate circular buffer for %d seconds at %.1f Mbits\n", __func__, state.timeout / 1000, (double)state.bitrate/1000000.0);
goto error;
}
else
{
state.callback_data.cb_len = count;
state.callback_data.cb_wptr = 0;
state.callback_data.cb_wrap = 0;
state.callback_data.cb_data = 0;
state.callback_data.iframe_buff_wpos = 0;
state.callback_data.iframe_buff_rpos = 0;
state.callback_data.header_wptr = 0;
}
}
}
// Set up our userdata - this is passed through to the callback where we need the information.
encoder_output_port->userdata = (struct MMAL_PORT_USERDATA_T *)&state.callback_data;
if (state.verbose)
fprintf(stderr, "Enabling encoder output port\n");
// Enable the encoder output port and tell it its callback function
status = mmal_port_enable(encoder_output_port, encoder_buffer_callback);
if (status != MMAL_SUCCESS)
{
vcos_log_error("Failed to setup encoder output");
goto error;
}
if (state.demoMode)
{
// Run for the user-specified time..
int num_iterations = state.timeout / state.demoInterval;
int i;
if (state.verbose)
fprintf(stderr, "Running in demo mode\n");
for (i=0;state.timeout == 0 || i<num_iterations;i++)
{
raspicamcontrol_cycle_test(state.camera_component);
vcos_sleep(state.demoInterval);
}
}
else
{
// Only encode stuff if we have a filename and it opened
// Note we use the copy in the callback, as the callback MIGHT change the file handle
if (state.callback_data.file_handle)
{
int running = 1;
// Send all the buffers to the encoder output port
{
int num = mmal_queue_length(state.encoder_pool->queue);
int q;
for (q=0;q<num;q++)
{
MMAL_BUFFER_HEADER_T *buffer = mmal_queue_get(state.encoder_pool->queue);
if (!buffer)
vcos_log_error("Unable to get a required buffer %d from pool queue", q);
if (mmal_port_send_buffer(encoder_output_port, buffer)!= MMAL_SUCCESS)
vcos_log_error("Unable to send a buffer to encoder output port (%d)", q);
}
}
// Send all the buffers to the splitter output port
if (state.raw_output) {
int num = mmal_queue_length(state.splitter_pool->queue);
int q;
for (q = 0; q < num; q++)
{
MMAL_BUFFER_HEADER_T *buffer = mmal_queue_get(state.splitter_pool->queue);
if (!buffer)
vcos_log_error("Unable to get a required buffer %d from pool queue", q);
if (mmal_port_send_buffer(splitter_output_port, buffer)!= MMAL_SUCCESS)
vcos_log_error("Unable to send a buffer to splitter output port (%d)", q);
}
}
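// Remember the capture state we started in, so that in split mode a file split is only
// requested once a capture segment has actually run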
int initialCapturing=state.bCapturing;
while (running)
{
// Change state
state.bCapturing = !state.bCapturing;
if (mmal_port_parameter_set_boolean(camera_video_port, MMAL_PARAMETER_CAPTURE, state.bCapturing) != MMAL_SUCCESS)
{
// How to handle?
}
// In circular buffer mode, exit and save the buffer (make sure we do this after having paused the capture)
if(state.bCircularBuffer && !state.bCapturing)
{
break;
}
if (state.verbose)
{
if (state.bCapturing)
fprintf(stderr, "Starting video capture\n");
else
fprintf(stderr, "Pausing video capture\n");
}
if(state.splitWait)
{
if(state.bCapturing)
{
if (mmal_port_parameter_set_boolean(encoder_output_port, MMAL_PARAMETER_VIDEO_REQUEST_I_FRAME, 1) != MMAL_SUCCESS)
{
vcos_log_error("failed to request I-FRAME");
}
}
else
{
if(!initialCapturing)
state.splitNow=1;
}
initialCapturing=0;
}
running = wait_for_next_change(&state);
}
if (state.verbose)
fprintf(stderr, "Finished capture\n");
}
else
{
if (state.timeout)
vcos_sleep(state.timeout);
else
{
// timeout = 0 so run forever
while(1)
vcos_sleep(ABORT_INTERVAL);
}
}
}
}
else
{
mmal_status_to_int(status);
vcos_log_error("%s: Failed to connect camera to preview", __func__);
}
if(state.bCircularBuffer)
{
int copy_from_end, copy_from_start;
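// Write the stored SPS/PPS header bytes first, then the buffered stream starting at the
// oldest retained I-frame: the tail of the buffer (I-frame offset to end) followed by the
// head (start of buffer up to the write pointer). If the buffer never wrapped, only the
// head up to the write pointer is valid.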
copy_from_end = state.callback_data.cb_len - state.callback_data.iframe_buff[state.callback_data.iframe_buff_rpos];
copy_from_start = state.callback_data.cb_len - copy_from_end;
copy_from_start = state.callback_data.cb_wptr < copy_from_start ? state.callback_data.cb_wptr : copy_from_start;
if(!state.callback_data.cb_wrap)
{
copy_from_start = state.callback_data.cb_wptr;
copy_from_end = 0;
}
fwrite(state.callback_data.header_bytes, 1, state.callback_data.header_wptr, state.callback_data.file_handle);
// Save circular buffer
fwrite(state.callback_data.cb_buff + state.callback_data.iframe_buff[state.callback_data.iframe_buff_rpos], 1, copy_from_end, state.callback_data.file_handle);
fwrite(state.callback_data.cb_buff, 1, copy_from_start, state.callback_data.file_handle);
if(state.callback_data.flush_buffers) fflush(state.callback_data.file_handle);
}
error:
mmal_status_to_int(status);
if (state.verbose)
fprintf(stderr, "Closing down\n");
// Disable all our ports that are not handled by connections
check_disable_port(camera_still_port);
check_disable_port(encoder_output_port);
check_disable_port(splitter_output_port);
if (state.preview_parameters.wantPreview && state.preview_connection)
mmal_connection_destroy(state.preview_connection);
if (state.encoder_connection)
mmal_connection_destroy(state.encoder_connection);
if (state.splitter_connection)
mmal_connection_destroy(state.splitter_connection);
// Can now close our file. Note disabling ports may flush buffers which causes
// problems if we have already closed the file!
if (state.callback_data.file_handle && state.callback_data.file_handle != stdout)
fclose(state.callback_data.file_handle);
if (state.callback_data.imv_file_handle && state.callback_data.imv_file_handle != stdout)
fclose(state.callback_data.imv_file_handle);
if (state.callback_data.pts_file_handle && state.callback_data.pts_file_handle != stdout)
fclose(state.callback_data.pts_file_handle);
if (state.callback_data.clockrt_file_handle && state.callback_data.clockrt_file_handle != stdout)
fclose(state.callback_data.clockrt_file_handle);
if (state.callback_data.datetime_file_handle && state.callback_data.datetime_file_handle != stdout)
fclose(state.callback_data.datetime_file_handle);
if (state.callback_data.raw_file_handle && state.callback_data.raw_file_handle != stdout)
fclose(state.callback_data.raw_file_handle);
/* Disable components */
if (state.encoder_component)
mmal_component_disable(state.encoder_component);
if (state.preview_parameters.preview_component)
mmal_component_disable(state.preview_parameters.preview_component);
if (state.splitter_component)
mmal_component_disable(state.splitter_component);
if (state.camera_component)
mmal_component_disable(state.camera_component);
destroy_encoder_component(&state);
raspipreview_destroy(&state.preview_parameters);
destroy_splitter_component(&state);
destroy_camera_component(&state);
if (state.verbose)
fprintf(stderr, "Close down completed, all components disconnected, disabled and destroyed\n\n");
}
if (status != MMAL_SUCCESS)
raspicamcontrol_check_configuration(128);
return exit_code;
}