Simple webcam sample using V4L2


This is a simple example of how one can access a USB webcam through the V4L2 API (on FreeBSD). Note that this code is written as an illustrative example, not as an efficient utility program; the application is therefore deliberately structured as a sequence of separate logical steps.

The captured frames are then stored in a series of JPEG image files.

The application can be compiled with GNU make by simply typing gmake. Note that the program requires libjpeg to be installed on the machine.

Invoking the application with

./webcamsample /dev/video0 /tmp/capture 3

would write the first 3 captured frames from /dev/video0 into the files /tmp/capture-0.jpg to /tmp/capture-2.jpg.
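For orientation, the following is a minimal sketch of the V4L2 call sequence the full program below walks through: open the device, negotiate a format, request and map a buffer, queue it, start streaming, dequeue a filled frame, and tear everything down again. It omits all error handling, the kqueue based waiting and the YUYV to RGB/JPEG conversion, and it simply assumes that /dev/video0 accepts 640x480 YUYV; it is only a roadmap, not the full example.

#include <linux/videodev2.h>
#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>

int main(void) {
	int fd = open("/dev/video0", O_RDWR);              /* 1. Open the capture device (blocking mode) */

	struct v4l2_format fmt;                            /* 2. Negotiate a frame format */
	memset(&fmt, 0, sizeof(fmt));
	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	fmt.fmt.pix.width = 640;
	fmt.fmt.pix.height = 480;
	fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
	ioctl(fd, VIDIOC_S_FMT, &fmt);

	struct v4l2_requestbuffers req;                    /* 3. Request a single memory mapped buffer */
	memset(&req, 0, sizeof(req));
	req.count = 1;
	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	req.memory = V4L2_MEMORY_MMAP;
	ioctl(fd, VIDIOC_REQBUFS, &req);

	struct v4l2_buffer buf;                            /* 4. Map the buffer and queue it */
	memset(&buf, 0, sizeof(buf));
	buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	buf.memory = V4L2_MEMORY_MMAP;
	buf.index = 0;
	ioctl(fd, VIDIOC_QUERYBUF, &buf);
	void* frame = mmap(NULL, buf.length, PROT_READ|PROT_WRITE, MAP_SHARED, fd, buf.m.offset);
	ioctl(fd, VIDIOC_QBUF, &buf);

	enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
	ioctl(fd, VIDIOC_STREAMON, &type);                 /* 5. Start streaming */

	ioctl(fd, VIDIOC_DQBUF, &buf);                     /* 6. Blocks until the driver filled the buffer */
	printf("Captured %u bytes of YUYV data at %p\n", buf.bytesused, frame);

	ioctl(fd, VIDIOC_STREAMOFF, &type);                /* 7. Stop streaming and tear down */
	munmap(frame, buf.length);
	close(fd);
	return 0;
}

The full program below performs exactly these steps but adds error handling, capability and format enumeration, multiple buffers and a kqueue based event loop.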

Makefile:

webcamsample: webcamsample.c
	clang -o webcamsample -Wall -ansi -std=c99 -DDEBUG -I/usr/local/include webcamsample.c -L/usr/local/lib -ljpeg

webcamsample.c:

/*
Simple capture program that tries to capture a specified number of
frames and stores them in a series of target files.

This program is *not* implemented to be used in any production
setting; it's just written as a simple example of how to use the
V4L2 API to capture frames using a webcam. This is also the
reason for its structure, which is far off from any clean software
development pattern.
*/
#include <linux/videodev2.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <errno.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <sys/ioctl.h>
#include <sys/event.h>
#include <jpeglib.h>
#include <jerror.h>
#ifndef __cplusplus
typedef int bool;
#define true 1
#define false 0
#endif
static void printUsage(char* argv[]) {
printf("Usage: %s CAPDEV TARGETFILES [NUMFRAMES]\n", argv[0]);
printf("\n");
printf("Captures the specified number of frames - or only a single frame if the\n");
printf("argument is omitted\n");
printf("\n");
printf("Arguments:\n");
printf("\tCAPDEV\n\t\tCapture device (for example /dev/video0)\n");
printf("\tTARGETFILES\n\t\tTarget filename prefix excluding the extension\n");
printf("\tNUMFRAMES\n\t\tNumber of frames to capture (if omitted capture a single frame\n\n");
}
enum cameraError {
cameraE_Ok,
cameraE_Failed,
cameraE_InvalidParam,
cameraE_UnknownDevice,
cameraE_PermissionDenied,
};
struct imageBuffer {
void* lpBase;
size_t sLen;
};
struct imgRawImage {
unsigned int numComponents;
unsigned long int width, height;
unsigned char* lpData;
};
/*
Write one image into a target file
See https://www.tspi.at/2020/03/20/libjpegexample.html
*/
static int storeJpegImageFile(struct imgRawImage* lpImage, char* lpFilename) {
struct jpeg_compress_struct info;
struct jpeg_error_mgr err;
unsigned char* lpRowBuffer[1];
FILE* fHandle;
fHandle = fopen(lpFilename, "wb");
if(fHandle == NULL) {
#ifdef DEBUG
fprintf(stderr, "%s:%u Failed to open output file %s\n", __FILE__, __LINE__, lpFilename);
#endif
return 1;
}
info.err = jpeg_std_error(&err);
jpeg_create_compress(&info);
jpeg_stdio_dest(&info, fHandle);
info.image_width = lpImage->width;
info.image_height = lpImage->height;
info.input_components = 3;
info.in_color_space = JCS_RGB;
jpeg_set_defaults(&info);
jpeg_set_quality(&info, 100, TRUE);
jpeg_start_compress(&info, TRUE);
/* Write every scanline ... */
while(info.next_scanline < info.image_height) {
lpRowBuffer[0] = &(lpImage->lpData[info.next_scanline * (lpImage->width * 3)]);
jpeg_write_scanlines(&info, lpRowBuffer, 1);
}
jpeg_finish_compress(&info);
fclose(fHandle);
jpeg_destroy_compress(&info);
return 0;
}
/*
Wrapper around ioctl that simply restarts the call whenever it
has been interrupted by a signal (EINTR) until it either succeeds
or fails for another reason.
*/
static int xioctl(int fh, unsigned long request, void* arg) {
int r;
do {
r = ioctl(fh, request, arg);
} while((r == -1) && (errno == EINTR));
return r;
}
/*
Open the device file (first check it exists, is
a device file, etc.)
*/
static char* deviceOpen_DefaultFilename = "/dev/video0";
static enum cameraError deviceOpen(
int* lpDeviceOut,
char* deviceName
) {
struct stat st;
int hHandle;
if(lpDeviceOut == NULL) {
return cameraE_InvalidParam;
}
(*lpDeviceOut) = -1;
if(deviceName == NULL) {
deviceName = deviceOpen_DefaultFilename;
}
/*
First check that the file exists and that we
are really seeing a device file
*/
if(stat(deviceName, &st) == -1) {
return cameraE_UnknownDevice;
}
if(!S_ISCHR(st.st_mode)) {
return cameraE_UnknownDevice;
}
hHandle = open(deviceName, O_RDWR|O_NONBLOCK, 0);
if(hHandle < 0) {
switch(errno) {
case EACCES: return cameraE_PermissionDenied;
case EPERM: return cameraE_PermissionDenied;
default: return cameraE_Failed;
}
}
(*lpDeviceOut) = hHandle;
return cameraE_Ok;
}
static enum cameraError deviceClose(
int hHandle
) {
if(hHandle < 0) { return cameraE_InvalidParam; }
close(hHandle);
return cameraE_Ok;
}
int main(int argc, char* argv[]) {
enum cameraError e;
int hHandle;
int numFrames = 1;
int kq = -1;
struct imgRawImage* lpRawImg;
if(argc < 3) { printUsage(argv); return 1; }
if(argc > 4) { printUsage(argv); return 1; }
/*
Determine the number of frames we should capture
and validate
*/
if(argc == 4) {
if(sscanf(argv[3], "%d", &numFrames) != 1) {
printUsage(argv);
return 1;
}
}
/*
Try to open the camera
*/
e = deviceOpen(&hHandle, argv[1]);
if(e != cameraE_Ok) {
printf("Failed to open camera\n");
return 2;
}
kq = kqueue();
if(kq == -1) {
printf("%s:%u Failed to create kqueue\n", __FILE__, __LINE__);
return 3;
}
/*
Query capabilities
*/
bool bReadWriteSupported = false;
bool bStreamingSupported = false;
{
struct v4l2_capability cap;
memset(&cap, 0, sizeof(cap));
if(xioctl(hHandle, VIDIOC_QUERYCAP, &cap) == -1) {
printf("%s:%u Failed to query capabilities\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
if((cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) == 0) {
printf("%s:%u Device does not support video capture\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
if((cap.capabilities & V4L2_CAP_READWRITE) != 0) {
bReadWriteSupported = true;
}
if((cap.capabilities & V4L2_CAP_STREAMING) != 0) {
bStreamingSupported = true;
}
}
#ifdef DEBUG
printf("%s:%u Read/Write interface supported: %s\n", __FILE__, __LINE__, (bReadWriteSupported == true) ? "yes" : "no");
printf("%s:%u Streaming interface supported: %s\n", __FILE__, __LINE__, (bStreamingSupported == true) ? "yes" : "no");
#endif
/*
Query cropping capabilities and set cropping rectangle
*/
int defaultWidth = 640;
int defaultHeight = 480;
for(;;) {
struct v4l2_cropcap cropcap;
memset(&cropcap, 0, sizeof(cropcap));
cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(xioctl(hHandle, VIDIOC_CROPCAP, &cropcap) == -1) {
printf("%s:%u Failed to query cropping capabilities, continuing anyways\n", __FILE__, __LINE__);
break;
}
#ifdef DEBUG
printf("Cropping capabilities:\n");
printf("\tDefault boundaries: %d, %d, %d, %d\n", cropcap.defrect.left, cropcap.defrect.top, cropcap.defrect.width, cropcap.defrect.height);
printf("\tBoundaries (left, top, width, height): %d, %d, %d, %d\n", cropcap.bounds.left, cropcap.bounds.top, cropcap.bounds.width, cropcap.bounds.height);
printf("\tAspect ratio: %u : %u\n", cropcap.pixelaspect.numerator, cropcap.pixelaspect.denominator);
#endif
printf("Setting default cropping rectangle ... ");
struct v4l2_crop crop;
crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
crop.c = cropcap.defrect;
if(xioctl(hHandle, VIDIOC_S_CROP, &crop) == -1) {
printf("failed\n");
} else {
printf("ok\n");
}
break;
}
/*
Enumerate all supported formats (even though we'll request 640 x 480
YUYV later on anyways)
*/
{
printf("Doing format negotiation\n");
int idx = 0;
for(idx = 0;; idx = idx + 1) {
struct v4l2_fmtdesc fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.index = idx;
if(xioctl(hHandle, VIDIOC_ENUM_FMT, &fmt) == -1) {
printf("Done enumeration after %u formats\n", idx);
break;
}
printf("\tFormat %u with code %08x (compressed: %s): %s\n", idx, fmt.pixelformat, ((fmt.flags & V4L2_FMT_FLAG_COMPRESSED) != 0) ? "yes" : "no", fmt.description);
}
}
/*
Format negotiation: simply request 640x480 YUYV and afterwards
read back the values the driver actually configured
*/
{
struct v4l2_format fmt;
memset(&fmt, 0, sizeof(fmt));
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = defaultWidth;
fmt.fmt.pix.height = defaultHeight;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
if(xioctl(hHandle, VIDIOC_S_FMT, &fmt) == -1) {
printf("%s:%u Format negotiation (S_FMT) failed!\n", __FILE__, __LINE__);
}
/* Now one should query the real size ... */
defaultWidth = fmt.fmt.pix.width;
defaultHeight = fmt.fmt.pix.height;
}
printf("Negotiated width and height: %d x %d\n", defaultWidth, defaultHeight);
/*
Setup buffers
*/
int bufferCount = 2;
{
struct v4l2_requestbuffers rqBuffers;
/*
Request bufferCount (2) memory mapped buffers. This keeps the
example simple but is not seamless; usually one would use 3+ ...
*/
memset(&rqBuffers, 0, sizeof(rqBuffers));
rqBuffers.count = bufferCount;
rqBuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
rqBuffers.memory = V4L2_MEMORY_MMAP;
if(xioctl(hHandle, VIDIOC_REQBUFS, &rqBuffers) == -1) {
printf("%s:%u Requesting buffers failed!\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
bufferCount = rqBuffers.count;
}
printf("Requested %d buffers\n", bufferCount);
/*
Map buffers
*/
struct imageBuffer* lpBuffers;
{
lpBuffers = calloc(bufferCount, sizeof(struct imageBuffer));
if(lpBuffers == NULL) {
printf("%s:%u Out of memory\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
int iBuf;
for(iBuf = 0; iBuf < bufferCount; iBuf = iBuf + 1) {
struct v4l2_buffer vBuffer;
memset(&vBuffer, 0, sizeof(struct v4l2_buffer));
vBuffer.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
vBuffer.memory = V4L2_MEMORY_MMAP;
vBuffer.index = iBuf;
if(xioctl(hHandle, VIDIOC_QUERYBUF, &vBuffer) == -1) {
printf("%s:%u Failed to query buffer %d\n", __FILE__, __LINE__, iBuf);
deviceClose(hHandle);
return 2;
}
lpBuffers[iBuf].lpBase = mmap(NULL, vBuffer.length, PROT_READ|PROT_WRITE, MAP_SHARED, hHandle, vBuffer.m.offset);
lpBuffers[iBuf].sLen = vBuffer.length;
if(lpBuffers[iBuf].lpBase == MAP_FAILED) {
printf("%s:%u Failed to map buffer %d\n", __FILE__, __LINE__, iBuf);
deviceClose(hHandle);
return 2;
}
}
}
/*
First we queue all buffers
*/
{
int iBuf;
for(iBuf = 0; iBuf < bufferCount; iBuf = iBuf + 1) {
struct v4l2_buffer buf;
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = iBuf;
if(xioctl(hHandle, VIDIOC_QBUF, &buf) == -1) {
printf("%s:%u Queueing buffer %d failed ...\n", __FILE__, __LINE__, iBuf);
deviceClose(hHandle);
return 2;
}
}
}
/*
Register the capture device with the kqueue. The device becomes
readable (EVFILT_READ) once a filled buffer is ready to be
dequeued - the same condition poll()/select() would report.
*/
{
struct kevent kev;
EV_SET(&kev, hHandle, EVFILT_READ, EV_ADD|EV_ENABLE|EV_CLEAR, 0, 0, NULL);
kevent(kq, &kev, 1, NULL, 0, NULL);
}
/*
Run streaming loop
*/
{
/* Enable streaming */
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(xioctl(hHandle, VIDIOC_STREAMON, &type) == -1) {
printf("%s:%u Stream on failed\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
}
/*
Capture specified number of frames ...
*/
int iFrames = 0;
while(iFrames < numFrames) {
struct kevent kev;
struct v4l2_buffer buf;
int r = kevent(kq, NULL, 0, &kev, 1, NULL);
if(r < 0) {
printf("%s:%u kevent failed\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
if(r > 0) {
/* We got our frame or EOF ... try to dequeue */
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if(xioctl(hHandle, VIDIOC_DQBUF, &buf) == -1) {
if(errno == EAGAIN) { continue; }
printf("%s:%u DQBUF failed\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
printf("%s:%u Dequeued buffer %d\n", __FILE__, __LINE__, buf.index);
/* Process the image: convert YUYV to RGB888 and store it as a JPEG file */
{
lpRawImg = malloc(sizeof(struct imgRawImage));
if(lpRawImg == NULL) {
printf("%s:%u Out of memory\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
lpRawImg->lpData = malloc(sizeof(unsigned char)*defaultWidth*defaultHeight*3);
if(lpRawImg->lpData == NULL) {
free(lpRawImg);
printf("%s:%u Out of memory\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
/*
Convert the previously requested YUYV (YUV 4:2:2) image into RGB (RGB888):

YUYV:   4 bytes -> 2 pixels
RGB888: 3 bytes -> 1 pixel
*/
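/*
The integer constants used in the conversion below are the usual
fixed point approximation (scaled by 256) of the ITU-R BT.601
YCbCr to RGB mapping:

C = Y - 16, D = U - 128, E = V - 128
R = clip((298*C         + 409*E + 128) >> 8)
G = clip((298*C - 100*D - 208*E + 128) >> 8)
B = clip((298*C + 516*D         + 128) >> 8)
*/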
lpRawImg->numComponents = 3;
lpRawImg->width = defaultWidth;
lpRawImg->height = defaultHeight;
unsigned long int row,col;
for(row = 0; row < defaultHeight; row=row+1) {
for(col = 0; col < defaultWidth; col=col+1) {
unsigned char y0, y1, y;
unsigned char u0, v0;
signed int c,d,e;
unsigned char r,g,b;
signed int rtmp,gtmp, btmp;
y0 = ((unsigned char*)(lpBuffers[buf.index].lpBase))[((col + row * defaultWidth) >> 1)*4 + 0];
u0 = ((unsigned char*)(lpBuffers[buf.index].lpBase))[((col + row * defaultWidth) >> 1)*4 + 1];
y1 = ((unsigned char*)(lpBuffers[buf.index].lpBase))[((col + row * defaultWidth) >> 1)*4 + 2];
v0 = ((unsigned char*)(lpBuffers[buf.index].lpBase))[((col + row * defaultWidth) >> 1)*4 + 3];
if((col + row * defaultWidth) % 2 == 0) {
y = y0;
} else {
y = y1;
}
c = ((signed int)y) - 16;
d = ((signed int)u0) - 128;
e = ((signed int)v0) - 128;
rtmp = ((298 * c + 409 * e + 128) >> 8);
gtmp = ((298 * c - 100 * d - 208 * e + 128) >> 8);
btmp = ((298 * c + 516 * d + 128) >> 8);
if(rtmp < 0) { r = 0; }
else if(rtmp > 255) { r = 255; }
else { r = (unsigned char)rtmp; }
if(gtmp < 0) { g = 0; }
else if(gtmp > 255) { g = 255; }
else { g = (unsigned char)gtmp; }
if(btmp < 0) { b = 0; }
else if(btmp > 255) { b = 255; }
else { b = (unsigned char)btmp; }
lpRawImg->lpData[(col + row*defaultWidth)*3 + 0] = r;
lpRawImg->lpData[(col + row*defaultWidth)*3 + 1] = g;
lpRawImg->lpData[(col + row*defaultWidth)*3 + 2] = b;
}
}
char* lpFilename = NULL;
if(asprintf(&lpFilename, "%s-%d.jpg", argv[2], iFrames) < 0) {
printf("%s:%u Out of memory, skipping %d\n", __FILE__, __LINE__, iFrames);
} else {
printf("%s:%u Writing %s\n", __FILE__, __LINE__, lpFilename);
storeJpegImageFile(lpRawImg, lpFilename);
free(lpFilename);
}
free(lpRawImg->lpData);
free(lpRawImg);
lpRawImg = NULL;
}
/* Re-enqueue */
if(xioctl(hHandle, VIDIOC_QBUF, &buf) == -1) {
printf("%s:%u Queueing buffer %d failed ...\n", __FILE__, __LINE__, buf.index);
deviceClose(hHandle);
return 2;
}
iFrames = iFrames + 1;
}
}
/*
Stop streaming
*/
{
/* Disable streaming */
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if(xioctl(hHandle, VIDIOC_STREAMOFF, &type) == -1) {
printf("%s:%u Stream off failed\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
}
/*
Release buffers ...
*/
{
int iBuf;
for(iBuf = 0; iBuf < bufferCount; iBuf = iBuf + 1) {
munmap(lpBuffers[iBuf].lpBase, lpBuffers[iBuf].sLen);
}
free(lpBuffers);
}
{
struct v4l2_requestbuffers rqBuffers;
/*
Setting the requested buffer count to 0 releases the
previously allocated buffers again ...
*/
memset(&rqBuffers, 0, sizeof(rqBuffers));
rqBuffers.count = 0;
rqBuffers.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
rqBuffers.memory = V4L2_MEMORY_MMAP;
if(xioctl(hHandle, VIDIOC_REQBUFS, &rqBuffers) == -1) {
printf("%s:%u Releasing buffers failed!\n", __FILE__, __LINE__);
deviceClose(hHandle);
return 2;
}
}
/*
Close camera at the end
*/
deviceClose(hHandle);
return 0;
}