0

I've been trying to write a C program to capture photos from a UVC camera using V4L. I've managed to get it to work for the most part, but every second time I run the program I get a zero-filled YUV buffer, which results in a "blank" green image. I tried capturing video using ffmpeg and I get similar results. Every second time I run the ffmpeg command `ffmpeg -f v4l2 -framerate 30 -video_size 1920x1080 -i /dev/video2 video.mp4`, I get a warning that says `[video4linux2,v4l2 @ 0x5567c2266f80] Dequeued v4l2 buffer contains corrupted data (0 bytes).` and the video is unplayable. Additionally, a dmesg log is generated with the content `uvcvideo 2-1:1.1: Non-zero status (-71) in video completion handler`. If I cancel the capture at that point and remove and re-add the uvcvideo kernel module (or unplug and replug the camera), everything works fine. After an extensive web search, I found this question, but the answer didn't quite help me, as it suggests a USB clear-feature request without further explanation, so I don't know which feature I'm supposed to send the request to.

Does anyone know how to tackle that problem?

Thanks in advance

Note: I asked this question on UL a few days ago, but I've concluded that it's more appropriate to ask it here.

The code:


#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <jpeglib.h>

#define VIDEO_DEVICE "/dev/video2"
#define CAPTURE_WIDTH 1920
#define CAPTURE_HEIGHT 1080
#define BUFFER_SIZE (CAPTURE_WIDTH * CAPTURE_HEIGHT * 2)

/*
 * Convert one packed UYVY 4:2:2 frame to packed 24-bit RGB.
 *
 * uyvy   - input frame, 2 bytes per pixel, laid out as U0 Y0 V0 Y1 per
 *          pair of horizontally adjacent pixels (width * height * 2 bytes)
 * rgb    - output buffer, 3 bytes per pixel (width * height * 3 bytes),
 *          caller-allocated
 * width  - frame width in pixels; must be even, since each UYVY macropixel
 *          carries two luma samples
 * height - frame height in pixels
 *
 * Uses the BT.601 coefficients (R = Y + 1.402 V', etc.) with U/V centered
 * on 128 and results clamped to [0, 255].
 */

/* Convert a single luma sample plus its (already centered) chroma pair
 * into one RGB triple written at rgb[0..2]. */
static void uyvy_sample_to_rgb(int y, int u, int v, unsigned char *rgb) {
    int r = y + 1.402 * v;
    int g = y - 0.344136 * u - 0.714136 * v;
    int b = y + 1.772 * u;

    rgb[0] = (r < 0) ? 0 : ((r > 255) ? 255 : r);
    rgb[1] = (g < 0) ? 0 : ((g > 255) ? 255 : g);
    rgb[2] = (b < 0) ? 0 : ((b > 255) ? 255 : b);
}

void yuv2rgb(unsigned char *uyvy, unsigned char *rgb, int width, int height) {
    for (int row = 0; row < height; row++) {
        for (int col = 0; col < width; col += 2) {
            int src = (row * width + col) * 2;
            int dst = (row * width + col) * 3;

            /* One macropixel: shared chroma, two luma samples. */
            int u  = uyvy[src]     - 128; /* center chroma on 0 */
            int y0 = uyvy[src + 1];
            int v  = uyvy[src + 2] - 128;
            int y1 = uyvy[src + 3];

            uyvy_sample_to_rgb(y0, u, v, &rgb[dst]);
            uyvy_sample_to_rgb(y1, u, v, &rgb[dst + 3]);
        }
    }
}



/*
 * Capture a single UYVY frame from VIDEO_DEVICE via V4L2 mmap streaming
 * I/O, convert it to RGB, and save it as "captured_frame.jpg".
 *
 * Returns 0 on success, 1 on any failure. All resources (fd, mapping,
 * RGB buffer, output file) are released through a single goto-based
 * cleanup path so no error branch leaks.
 */
int main(void) {
    int rc = 1;                      /* exit status: failure until proven otherwise */
    int videoFd = -1;
    void *frame = MAP_FAILED;        /* mmap'ed capture buffer */
    size_t frameLen = 0;
    int streaming = 0;               /* nonzero once STREAMON succeeded */
    unsigned char *rgbBuffer = NULL;
    FILE *file = NULL;
    struct v4l2_capability cap;
    struct v4l2_format format;
    struct v4l2_requestbuffers reqbuf;
    struct v4l2_buffer buf;
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;

    // Open the video device
    videoFd = open(VIDEO_DEVICE, O_RDWR);
    if (videoFd < 0) {
        perror("Failed to open video device");
        goto cleanup;
    }

    // Check device capabilities
    if (ioctl(videoFd, VIDIOC_QUERYCAP, &cap) < 0) {
        perror("Failed to query device capabilities");
        goto cleanup;
    }
    if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
        fprintf(stderr, "Video capture not supported\n");
        goto cleanup;
    }

    // Set the desired capture format
    memset(&format, 0, sizeof(format));
    format.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    format.fmt.pix.width = CAPTURE_WIDTH;
    format.fmt.pix.height = CAPTURE_HEIGHT;
    format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY; // YUV 4:2:2 format
    if (ioctl(videoFd, VIDIOC_S_FMT, &format) < 0) {
        perror("Failed to set video format");
        goto cleanup;
    }
    // VIDIOC_S_FMT may silently adjust the request to whatever the driver
    // supports; verify we got exactly what yuv2rgb() expects, otherwise the
    // conversion below would read garbage.
    if (format.fmt.pix.pixelformat != V4L2_PIX_FMT_UYVY ||
        format.fmt.pix.width != CAPTURE_WIDTH ||
        format.fmt.pix.height != CAPTURE_HEIGHT) {
        fprintf(stderr, "Driver did not accept %dx%d UYVY\n",
                CAPTURE_WIDTH, CAPTURE_HEIGHT);
        goto cleanup;
    }

    // Request a single buffer for capture.
    // NOTE(review): count = 1 is the bare minimum; some drivers are more
    // reliable with two or more buffers queued — worth trying if captures
    // are flaky.
    memset(&reqbuf, 0, sizeof(reqbuf));
    reqbuf.count = 1;
    reqbuf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    reqbuf.memory = V4L2_MEMORY_MMAP;
    if (ioctl(videoFd, VIDIOC_REQBUFS, &reqbuf) < 0) {
        perror("Failed to request buffer for capture");
        goto cleanup;
    }

    // Map the buffer for user-space access
    memset(&buf, 0, sizeof(buf));
    buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    buf.memory = V4L2_MEMORY_MMAP;
    buf.index = 0;
    if (ioctl(videoFd, VIDIOC_QUERYBUF, &buf) < 0) {
        perror("Failed to query buffer");
        goto cleanup;
    }
    frameLen = buf.length;
    frame = mmap(NULL, frameLen, PROT_READ | PROT_WRITE, MAP_SHARED,
                 videoFd, buf.m.offset);
    if (frame == MAP_FAILED) {
        perror("Failed to map buffer");
        goto cleanup;
    }

    // Queue the buffer, then start streaming
    if (ioctl(videoFd, VIDIOC_QBUF, &buf) < 0) {
        perror("Failed to queue buffer");
        goto cleanup;
    }
    if (ioctl(videoFd, VIDIOC_STREAMON, &type) < 0) {
        perror("Failed to start capturing");
        goto cleanup;
    }
    streaming = 1;

    // Block until the driver hands the filled buffer back
    if (ioctl(videoFd, VIDIOC_DQBUF, &buf) < 0) {
        perror("Failed to dequeue buffer");
        goto cleanup;
    }
    // A short (or zero-byte) frame means the driver delivered corrupted
    // data; encoding it would just produce a blank green image, so fail
    // loudly instead.
    if (buf.bytesused < BUFFER_SIZE) {
        fprintf(stderr, "Dequeued frame is short: %u of %d bytes\n",
                (unsigned)buf.bytesused, BUFFER_SIZE);
        goto cleanup;
    }

    // Convert the captured frame from UYVY to RGB
    rgbBuffer = malloc(CAPTURE_WIDTH * CAPTURE_HEIGHT * 3);
    if (rgbBuffer == NULL) {
        perror("Failed to allocate RGB buffer");
        goto cleanup;
    }
    yuv2rgb((unsigned char *)frame, rgbBuffer, CAPTURE_WIDTH, CAPTURE_HEIGHT);

    // Save the RGB frame as a JPEG image
    file = fopen("captured_frame.jpg", "wb");
    if (file == NULL) {
        perror("Failed to open output file");
        goto cleanup;
    }

    // Use libjpeg to write the RGB data as a JPEG image.
    // (jpeg_std_error's default error handler exits the process on fatal
    // errors, so no explicit error branch is needed here.)
    struct jpeg_compress_struct cinfo;
    struct jpeg_error_mgr jerr;

    cinfo.err = jpeg_std_error(&jerr);
    jpeg_create_compress(&cinfo);
    jpeg_stdio_dest(&cinfo, file);

    cinfo.image_width = CAPTURE_WIDTH;
    cinfo.image_height = CAPTURE_HEIGHT;
    cinfo.input_components = 3;       /* RGB */
    cinfo.in_color_space = JCS_RGB;

    jpeg_set_defaults(&cinfo);
    jpeg_set_quality(&cinfo, 80, TRUE);
    jpeg_start_compress(&cinfo, TRUE);

    JSAMPROW row_pointer[1];
    while (cinfo.next_scanline < cinfo.image_height) {
        row_pointer[0] = &rgbBuffer[cinfo.next_scanline * cinfo.image_width
                                    * cinfo.input_components];
        jpeg_write_scanlines(&cinfo, row_pointer, 1);
    }

    jpeg_finish_compress(&cinfo);
    jpeg_destroy_compress(&cinfo);

    // fclose flushes buffered data; for a write stream its result matters.
    if (fclose(file) != 0) {
        file = NULL;
        perror("Failed to finish writing output file");
        goto cleanup;
    }
    file = NULL;

    // Stop capturing frames
    if (ioctl(videoFd, VIDIOC_STREAMOFF, &type) < 0) {
        perror("Failed to stop capturing");
        goto cleanup;
    }
    streaming = 0;

    printf("Frame captured and saved successfully.\n");
    rc = 0;

cleanup:
    if (file != NULL)
        fclose(file);
    free(rgbBuffer);
    if (streaming)
        ioctl(videoFd, VIDIOC_STREAMOFF, &type);
    if (frame != MAP_FAILED)
        munmap(frame, frameLen);
    if (videoFd >= 0)
        close(videoFd);
    return rc;
}
Hristos
  • 3
  • 3

0 Answers0