
Using V4L2 to Open a UVC Camera on Linux

柴亦
2023-12-01

The code is posted directly below; it was built with Qt (qmake).
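For context, a minimal qmake project file that pulls in the Qt modules and the C++11 threading support this code relies on might look like the sketch below. The target name and the source/header file names are placeholders, not taken from the original post:

# camera.pro -- minimal sketch; TARGET and the file names are assumptions
QT       += core gui          # QObject, QImage, QPixmap
CONFIG   += c++11             # std::thread and nullptr require C++11
TEMPLATE  = app
TARGET    = v4l2camera
SOURCES  += main.cpp camera.cpp
HEADERS  += camera.h
LIBS     += -lpthread         # std::thread is backed by pthreads on Linux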

#include <errno.h>
#include <fcntl.h>
#include <linux/videodev2.h>
#include <linux/v4l2-controls.h>
#include <stdint.h>
#include <stdio.h>
#include <string.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <stdlib.h>
#include <dirent.h>
#include <stdbool.h>
#include <math.h>

#include <string>
#include <thread>

#include <QObject>
#include <QScopedPointer>
#include <QImage>
#include <QPixmap>

#define TRUE                1
#define FALSE               0
#define MMAP_SIZE           4

class CameraPrivate;
class Camera : public QObject
{
    Q_OBJECT
public:
    explicit Camera(QObject *parent = nullptr);
    ~Camera();

    void searchDevice();
    bool running();
    int open(const char *device_dir);
    void close();

private:
    friend class CameraPrivate;
    QScopedPointer<CameraPrivate> p;

Q_SIGNALS:
    void frame(QPixmap);
};

class CameraPrivate
{
public:
    explicit CameraPrivate();
    ~CameraPrivate();

    Camera *q = nullptr;                    // back-pointer to the owning Camera (assumed to be set in Camera's constructor, not shown here), used to emit frame()
    int fd = -1;                            // device file descriptor
    struct v4l2_streamparm stream_para;     // streaming parameters, e.g. the frame rate
    struct v4l2_capability cap;             // device capabilities: video capture, audio input/output, etc.
    struct v4l2_fmtdesc fmtdesc;            // enumerates the image formats the device supports: VIDIOC_ENUM_FMT
    struct v4l2_format fmt;                 // its struct v4l2_pix_format member holds the capture width/height and pixel format, e.g. V4L2_PIX_FMT_YYUV / V4L2_PIX_FMT_YUYV
    struct v4l2_requestbuffers req;         // request for frame buffers sent to the driver, including how many to allocate
    struct v4l2_buffer buf;                 // describes one frame buffer inside the driver
    struct v4l2_control ctrl;
    uint32_t width = 0;
    uint32_t height = 0;

    bool thread_exit = true;
    std::thread camera_thread;

    struct buffer                           // frame data mapped from the driver
    {
        void * start;
        unsigned int length;
        long long int timestamp;
    } *buffers = nullptr;

    void searchDevice();
    int v4l2_init(const char *device_dir);
    int v4l2_grab();
    int v4l2_running();
    int v4l2_control(const __u32 &id, const __s32 &value);
    int v4l2_release();
};

void CameraPrivate::searchDevice()
{
    const std::string device = "/dev";

    DIR *dir;
    struct dirent *ptr;

    if( (dir = opendir(device.c_str())) == NULL )
    {
        printf("open dir error\n");
        return;
    }

    // list every /dev/video* node
    while ((ptr = readdir(dir)) != NULL)
    {
        std::string name = ptr->d_name;
        if( name.find("video") != std::string::npos )
        {
            std::string device_dir = device + "/" + name;
            printf("device dir: %s\n", device_dir.c_str());
        }
    }
    closedir(dir);
}

int CameraPrivate::v4l2_init(const char *device_dir)
{
    fd = open(device_dir, O_RDWR);
    if( fd < 0 ) {
        printf("camera open fail, device dir: %s\n", device_dir);
        return FALSE;
    }

    if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == -1) {
        printf("unable querying capabilities\n");
        return FALSE;
    }
    else {
        printf( "Driver Caps:\n"
                "  Driver: \"%s\"\n"
                "  Card: \"%s\"\n"
                "  Bus: \"%s\"\n"
                "  Version: %d\n"
                "  Capabilities: %x\n",
                cap.driver,
                cap.card,
                cap.bus_info,
                cap.version,
                cap.capabilities);
    }

    fmtdesc.index = 0;
    fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    printf("Supported formats: \n");
    while(ioctl(fd, VIDIOC_ENUM_FMT, &fmtdesc) != -1)         // enumerate the pixel formats the device supports
    {
        printf("\t%d. %s\n", fmtdesc.index + 1, fmtdesc.description);
        if (fmtdesc.pixelformat == V4L2_PIX_FMT_RGB24)         // the format requested below
        {
            struct v4l2_frmsizeenum frame;
            memset(&frame, 0, sizeof(frame));
            frame.index = 0;
            frame.pixel_format = fmtdesc.pixelformat;           // enumerate the frame sizes for this format
            while (ioctl(fd, VIDIOC_ENUM_FRAMESIZES, &frame) >= 0)
            {
                if (frame.type == V4L2_FRMSIZE_TYPE_DISCRETE)   // keep the largest discrete resolution
                {
                    width  = width  < frame.discrete.width  ? frame.discrete.width  : width;
                    height = height < frame.discrete.height ? frame.discrete.height : height;
                }
                frame.index++;
            }
        }
        fmtdesc.index ++;
    }

    memset(&fmt, 0, sizeof(struct v4l2_format));
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = width;
    fmt.fmt.pix.height = height;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_RGB24;    // requested pixel format; choose one the camera actually supports
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
    if (ioctl(fd, VIDIOC_S_FMT, &fmt) == -1)         // set the capture format: width, height and pixel format (e.g. MJPEG, YUYV, RGB24)
    {
       printf("Setting Pixel Format error\n");
       return FALSE;
    }
    if(ioctl(fd, VIDIOC_G_FMT, &fmt) == -1)          // read the format back; the driver may have adjusted the request
    {
       printf("Unable to get format\n");
       return FALSE;
    }
    width  = fmt.fmt.pix.width;                      // keep the size the driver actually accepted
    height = fmt.fmt.pix.height;
    printf("frame size: %u * %u\npixel format: %d\n", width, height, fmt.fmt.pix.pixelformat);

    memset(&stream_para, 0, sizeof(struct v4l2_streamparm));
    stream_para.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    stream_para.parm.capture.timeperframe.denominator = 25;
    stream_para.parm.capture.timeperframe.numerator = 1;

    if(ioctl(fd, VIDIOC_S_PARM, &stream_para) == -1) {
        printf("Unable to set frame rate\n");
        return FALSE;
    }
    if(ioctl(fd, VIDIOC_G_PARM, &stream_para) == -1) {
        printf("Unable to get frame rate\n");
        return FALSE;
    }
    printf("numerator:%d\ndenominator:%d\n", stream_para.parm.capture.timeperframe.numerator,
                                             stream_para.parm.capture.timeperframe.denominator);

    return TRUE;
}

int CameraPrivate::v4l2_grab()
{
    req.count = MMAP_SIZE;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;

    if (ioctl(fd, VIDIOC_REQBUFS, &req) == -1)        // request frame buffers from the driver for memory-mapped I/O
    {
        printf("Requesting Buffer error\n");
        return FALSE;
    }
    // mmap for buffers
    buffers = (buffer *)malloc(req.count * sizeof(*buffers));
    if( !buffers )
    {
        printf("Out of memory\n");
        return FALSE;
    }

    unsigned int i;
    for(i = 0; i < req.count; i ++)
    {
        memset(&buf, 0, sizeof(buf));
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = i;
        if(ioctl(fd, VIDIOC_QUERYBUF, &buf) == -1)  // query the allocated buffer: its state, its offset into kernel space and its length
        {
            printf("Querying Buffer error\n");
            return FALSE;
        }
        buffers[i].length = buf.length;
        buffers[i].start = mmap(NULL,
                                buf.length,
                                PROT_READ | PROT_WRITE, MAP_SHARED,
                                fd,
                                buf.m.offset);

        if(buffers[i].start == MAP_FAILED)
        {
            printf("buffer map error\n");
            return FALSE;
        }
    }

    // queue all buffers into the driver's incoming queue
    for(i = 0; i < req.count; i ++)
    {
        buf.index = i;
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        if(ioctl(fd, VIDIOC_QBUF, &buf) == -1)     // put an empty buffer on the capture queue
        {
            printf("queue buffer error\n");
            return FALSE;
        }
    }
    // start streaming
    enum v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    if(ioctl(fd, VIDIOC_STREAMON, &type) == -1)    // start video capture
    {
        printf("stream on error\n");
        return FALSE;
    }
    return TRUE;
}

int CameraPrivate::v4l2_running()
{
    camera_thread = std::thread([=](){
        thread_exit = false;

        uint32_t i;
        QImage image;
        QPixmap pixmap;
        while (!thread_exit) {
            for(i = 0; i < MMAP_SIZE; i ++) {
                buf.index = i;
                ioctl(fd, VIDIOC_DQBUF, &buf);      // dequeue a filled buffer; blocks until a frame is ready

                // wrap the RGB24 frame data; buf.index now holds the buffer the driver actually returned
                uint8_t *image_buf = reinterpret_cast<uint8_t *>(buffers[buf.index].start);
                image = QImage(image_buf, width, height, width * 3, QImage::Format_RGB888);
                pixmap.convertFromImage(image);
                emit q->frame(pixmap);              // q is the back-pointer to the public Camera object

                ioctl(fd, VIDIOC_QBUF, &buf);       // hand the buffer back to the driver
            }
        }

        v4l2_release();
    });
    return TRUE;
}

int CameraPrivate::v4l2_control(const __u32 &id, const __s32 &value)
{
    if( fd < 0 ) {
        return FALSE;
    }
    ctrl.id = id;
    ctrl.value = value;

    if (ioctl(fd, VIDIOC_S_CTRL, &ctrl) == -1) {
        printf("v4l2_control error\n");
        return FALSE;
    }
    return TRUE;
}

int CameraPrivate::v4l2_release()
{
    if( fd < 0 ) {
        return FALSE;
    }

    enum v4l2_buf_type type;

    // stop streaming
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    ioctl(fd, VIDIOC_STREAMOFF, &type);

    // unmap and free the frame buffers
    if( buffers ) {
        for(int i = 0; i < MMAP_SIZE; i ++) {
            if( buffers[i].start ) {
                munmap(buffers[i].start, buffers[i].length);
            }
        }
        free(buffers);
        buffers = nullptr;
    }

    // close the device
    close(fd);
    fd = -1;
    printf("close fd\n");
    return TRUE;
}

void Camera::searchDevice()
{
    p->searchDevice();
}

bool Camera::running()
{
    return !p->thread_exit;
}

int Camera::open(const char *device_dir)
{
    if( running() ) {
        return TRUE;
    }
    if( p->v4l2_init(device_dir) ) {
        if( p->v4l2_grab() ) {
            p->v4l2_running();
            return TRUE;
        }
    }
    return FALSE;
}

void Camera::close()
{
    if( running() ) {
        p->thread_exit = true;
        p->camera_thread.join();
    }
}