Article 5ASQT Display output from camera

Display output from camera

by
piobair
from LinuxQuestions.org on (#5ASQT)
The objective is to capture a camera output using V4L2 and display it to the screen using SDL2.

I started with a program https://gist.github.com/lightbits/70399ac79ec005751e94 which captures the video stream and writes 200 frames of that to a .bmp file. It did not compile as downloaded; I massaged it, resulting in c920.c (see attached, c920.txt).
Then I took another program sdl.c (see attached, sdl.txt) which loads a .bmp file and displays it to the screen.

Attempting to marry the two programs together results in the following:
Code://c-sdl.c H264 raw capture
// compile as $ gcc c-sdl.c -lSDL2
// https://gist.github.com/lightbits/70399ac79ec005751e94
/* V4L2 video picture grabber
Copyright (C) 2009 Mauro Carvalho Chehab <mchehab@infradead.org>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
Modified by Ascend NTNU:
* H264 streaming for Logitech C920
* Commentary
* Rewrote from C to C++
* Output to single file instead of frame by frame
* Releasing on-device allocated memory by calling VIDIOC_REQBUF
with count 0
* Setting the framerate
*/

// How to compile
// --------------------------------------------------------
// Acquire the video 4 linux 2 development libraries (v4l2)
// $ sudo apt-get install libv4l-dev
// $ sudo apt-get install v4l-utils

// Description
// -----------------------------------------------------------
// The code will initiate a H264 stream from the camera device
// located at /dev/video1. It will store N frames concatenated
// in a file named output.raw.

// See https://github.com/bellbind/node-v4l2camera/wiki/V4l2-programming
// for general workflow with the v4l2 libraries.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/types.h>
#include <sys/time.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <libv4l2.h>

#include <SDL2/SDL.h>

struct v4l2_capability cap;
struct v4l2_cropcap cropcap;
struct v4l2_crop crop;
struct v4l2_format fmt;
struct v4l2_queryctrl qmt;

SDL_Window *window = NULL;
SDL_Renderer *renderer = NULL;
SDL_Texture *bitmapTex = NULL;
SDL_Surface *bitmapSurface = NULL;

struct timeval tv;

// Wrapper around v4l2_ioctl for programming the video device,
// that automatically retries the USB request if something
// unintentionally aborted the request.
int xioctl(int fh, int request, void *arg){
int r;
do {
r = ioctl(fh, request, arg);
} while (r == -1 && ((errno == EINTR) || (errno == EAGAIN)));
return r;
}

// Change these to your liking...
// or modify the program to take them cmd arguments!

// Number of frames that we will capture.
#define NUM_FRAME_CAPTURES 200

// This determines the number of "working" buffers we
// tell the device that it can use. I guess 3 is an OK
// amount? Maybe try less or more if it runs badly.
#define MMAP_BUFFERS 3

int main(int argc, char **argv){
char *device_name, code[5];
int i, r, posX = 100, posY = 100, pix_bytes, iframe, buffer_index, fd;
struct v4l2_buffer buf;
struct timeval tv;
device_name = "/dev/video0"; // future option: include in argv
// Open the device
fd = open(device_name, O_RDWR | O_NONBLOCK, 0);
if (fd < 0) {
printf("Failed to open device\n");
exit(EXIT_FAILURE);
}

if (-1 == xioctl(fd, VIDIOC_QUERYCAP, &cap)) {
printf("ERROR: %s\n", strerror(errno));
exit(EXIT_FAILURE);
}

if((cap.capabilities & 1) != 1){
printf("ERROR: This app cannot deal with device other than V4L2_CAP_VIDEO_CAPTURE ");
for(i=31; i> -1; i--){
printf("%d", (cap.capabilities >>i) &1);
if(!(i % 8))printf(" ");
}
printf("\n");
exit(0);
}
// see https://www.kernel.org/doc/html/v5.3/media/uapi/v4l/vidioc-querycap.html#c.v4l2_capability
// for other/additional capabilityes.


// Specify the format of the data we want from the camera
// Run v4l2-ctl --device=/dev/video1 --list-formats on the
// device to see that sort of pixel formats are supported!
/*v4l2_format fmt = {};
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = FRAME_WIDTH;
fmt.fmt.pix.height = FRAME_HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_H264;
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
xioctl(fd, VIDIOC_S_FMT, &fmt);*/

// poll fd for device format
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; // == 1
if(-1 == xioctl(fd, VIDIOC_G_FMT, &fmt)){
printf("ERROR getting info from %s\n", device_name);
return 1;
}

// convert littleendian __u32 to string
code[3] = fmt.fmt.pix.pixelformat >>24;
code[2] = fmt.fmt.pix.pixelformat >>16 & 255;
code[1] = fmt.fmt.pix.pixelformat >>8 & 255;
code[0] = fmt.fmt.pix.pixelformat & 255;
code[4] = 0;
printf("%s data: bytes/line = %d; width = %d, height = %d image size = %d; format = %s\n", device_name, fmt.fmt.pix.bytesperline, fmt.fmt.pix.width, fmt.fmt.pix.height, fmt.fmt.pix.sizeimage, code);
if(fmt.fmt.pix.pixelformat == 0x47504a4d){ // == MJPG
pix_bytes = fmt.fmt.pix.sizeimage / fmt.fmt.pix.width / fmt.fmt.pix.height;

// My cameras reply that the fmt.fmt.pix.pixelformat = "MJPG"
// v4l2 recognizes "MJPG" as a non-standard format. I am assuming thta is functionally equivalent to RGBR
// https://www.kernel.org/doc/html/v4.10/media/uapi/v4l/pixfmt-013.html
// https://www.kernel.org/doc/html/v4.10/media/uapi/v4l/pixfmt-013.html
printf("Assuming MJPG == M JPEG = RGBR\n");
if(pix_bytes == 2)fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB565X;
else if(pix_bytes == 3) fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;
else{
printf("%d bytes per pixel; cannot guess format\n", pix_bytes);
return 0;
}
}

SDL_Init(SDL_INIT_VIDEO);
window = SDL_CreateWindow("Webcam", posX, posY, fmt.fmt.pix.width, fmt.fmt.pix.height, 0);

renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);

// https://www.kernel.org/doc/html/v4.11/media/uapi/v4l/pixfmt-013.html
// https://www.kernel.org/doc/html/v4.11/media/uapi/v4l/pixfmt-reserved.html

// Set streaming parameters, i.e. frames per second.
// You'll want to query the device for whether or not it
// supports setting the frame time, and what possible choices
// it supports.
// See http://stackoverflow.com/questions/13981933/v4l2-fcntl-ioctl-vidioc-s-parm-for-setting-fps-and-resolution-of-camera-capture
/*v4l2_streamparm parm = {};
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
parm.parm.output.timeperframe.numerator = 1;
parm.parm.output.timeperframe.denominator = 30;
xioctl(fd, VIDIOC_S_PARM, &parm);*/

// Sidenote: Run v4l-info /dev/video1 if you want to see what
// other stuff that the device supports.

// Check what format we _actually_ got
printf("Opened device with format:\n");
printf("Width: %d\n", fmt.fmt.pix.width);
printf("Height: %d\n", fmt.fmt.pix.height);
printf("Pixel format: 0x%x\n", fmt.fmt.pix.pixelformat);

// Request N buffers that are memory mapped between
// our application space and the device
struct v4l2_requestbuffers request;
request.count = MMAP_BUFFERS;
request.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
request.memory = V4L2_MEMORY_MMAP;
xioctl(fd, VIDIOC_REQBUFS, &request);

int num_buffers = request.count;
printf("Got %d buffers\n", num_buffers);

struct Buffer { void *start; size_t length;} buffers[4];

// Find out where each requested buffer is located in memory
// and map them into our application space
for (int buffer_index = 0; buffer_index < num_buffers; ++buffer_index) {
// struct v4l2_buffer buf;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = buffer_index;
if(-1 == xioctl(fd, VIDIOC_QUERYBUF, &buf)){
printf("ERROR Could not QUERYBUF\n");
return 1;
}

buffers[buffer_index].length = buf.length;
buffers[buffer_index].start =
mmap(0 /* start anywhere */,
buf.length,
PROT_READ | PROT_WRITE /* required */,
MAP_SHARED /* recommended */,
fd, buf.m.offset);

if (MAP_FAILED == buffers[buffer_index].start){
printf("mmap failed %d, %s\n", errno, strerror(errno));
exit(EXIT_FAILURE);
}
}

// Queue the buffers, i.e. indicate to the device
// that they are available for writing now.
for (int i = 0; i < num_buffers; ++i) {
// struct v4l2_buffer buf; - declared at head of main()
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
buf.index = i;
xioctl(fd, VIDIOC_QBUF, &buf);
}

// Start streazv4l_buf_type
enum v4l2_buf_type type;
type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
xioctl(fd, VIDIOC_STREAMON, &type);

// Open output file
FILE *fout = fopen("output.raw", "w");
if (!fout) {
printf("Could not open file output\n");
exit(EXIT_FAILURE);
}

// https://www.kernel.org/doc/html/v5.3/media/uapi/v4l/pixfmt-compressed.html#v4l2-pix-fmt-jpeg

// Capture n frames
for (int iframe = 0; iframe < NUM_FRAME_CAPTURES; iframe++) {
// The device will now output data continuously.
// We will use the FD_ZERO/FD_SET/select mechanisms
// to wait until there is data available from the
// device. We can specify how long we should wait,
// and timeout if we think too much time has passed.

fd_set fds;
r = 0;
do {
FD_ZERO(&fds);
FD_SET(fd, &fds);
tv.tv_sec = 2;
tv.tv_usec = 0;
r = select(fd + 1, &fds, NULL, NULL, &tv);
} while ((r == -1 && (errno = EINTR)));

if (r == -1) {
printf("select failed\n");
exit(EXIT_FAILURE);
}
if(iframe == 0) printf("Frame interval = %d u-seconds/event -> %f fps?\n", tv.tv_usec, 1.0 /tv.tv_usec *1e6);

// Data has arrived! Let's "dequeue" a buffer to get its data
xioctl(fd, VIDIOC_DQBUF, &buf);

// Now we have gotten the data into one of our buffers.
// buf.index STREAMONSTREAMON -> Which mmap'ed buffer is the data located in
// buffers[buf.index].start -> Where in memory is the data located
// buf.bytesused -> Size of data chunk in bytes

// Do whatever you want with the stream data here!
// -----------------------------------------------
// For now, write buffer to an output file. Since we
// stream in H264, you'll need something like ffmpeg
// to decode it if you vidioc_querybufwant to check the output. For
// example:
//
// $ ffmpeg -f h264 -i output.raw -vcodec copy output.mp4

fwrite(buffers[buf.index].start, buf.bytesused, 1, fout);
SDL_RWops *rw = SDL_RWFromMem(buffers[buf.index].start, buffers[buf.index].length);
bitmapSurface = SDL_LoadBMP_RW(rw, 0);
SDL_FreeSurface(bitmapSurface);

// Queue buffer for writing again
xioctl(fd, VIDIOC_QBUF, &buf);
}

// Turn off stream
type = 1; // == V4L2_BUF_TYPE_VIDEO_CAPTURE;
xioctl(fd, VIDIOC_STREAMOFF, &type);

SDL_RenderClear(renderer);
SDL_RenderCopy(renderer, bitmapTex, NULL, NULL);
SDL_RenderPresent(renderer);

fclose(fout);

// Unmap buffers
for (int i = 0; i < num_buffers; ++i)
munmap(buffers[i].start, buffers[i].length);

// Tell the device that it can release memory
request.count = 0;
request.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
request.memory = V4L2_MEMORY_MMAP;
xioctl(fd, VIDIOC_REQBUFS, &request);

close(fd);
SDL_DestroyRenderer(renderer);
SDL_DestroyWindow(window);

SDL_Quit();
}This code creates an appropriate window, but does not fill it with an image.
What is missing to get the image?
Attached Files
txt.gifc920.txt (10.6 KB)
txt.gifsdl.txt (1.4 KB)
latest?d=yIl2AUoC8zA latest?i=xi8qUVuZTPw:vl947ulYdQA:F7zBnMy latest?i=xi8qUVuZTPw:vl947ulYdQA:V_sGLiP latest?d=qj6IDK7rITs latest?i=xi8qUVuZTPw:vl947ulYdQA:gIN9vFwxi8qUVuZTPw
External Content
Source RSS or Atom Feed
Feed Location https://feeds.feedburner.com/linuxquestions/latest
Feed Title LinuxQuestions.org
Feed Link https://www.linuxquestions.org/questions/
Reply 0 comments