
[Question] Problem with V4L failing to capture images

leonchiong
OP, posted on 2016-09-24
Problem with V4L failing to capture images
I have recently been working on the camera program for the core board, using the V4L API. After reading a lot of material and tutorials online, I followed one of them and wrote an LVideo4l class in Qt, and it runs normally once ported to the board.
The problem I am running into now is that after ioctl(fd, VIDIOCMCAPTURE, &(mmap));, the call to ioctl(fd, VIDIOCSYNC, &(frame_current)) that fetches the image always fails (the capture sequence I am following is sketched below, after the log). The error output is:

VideoIn port 1 open
Sensor power down
Sensor not to power down
Init NT_99141 in port 1
Sensor power down
Sensor not to power down
Sensor reset
I2C added
detectd sensor id0=14 id1=10
NTSetResolution:1
sensor change resolution begin-----
Programming sensor init value
Programming sensor resolution value = 1
sensor change resolution end-----
video driver open successful
m_v4ldev.capability W55FA92 Camera dev1 0 1 480 640 120 160 641
m_v4ldev.picture 0 0 0 16 0 7 0
m_v4ldev.picture 0 0 0 16 0 7 0
mbuf.size 2328576 mbuf.offsets 0xc7334c mbuf.frames 3
ioctrl VIDIOCMCAPTURE value 0
start get Image
error: v4l_grab_sync faild. Resource temporarily unavailable
error: v4l_grab_sync faild. Resource temporarily unavailable
error: v4l_grab_sync faild. Resource temporarily unavailable
error: v4l_grab_sync faild. Resource temporarily unavailable
error: v4l_grab_sync faild. Resource temporarily unavailable
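
For reference, the sequence I am following (my understanding of the tutorial, not the exact class code; /dev/video0 and the 640x480 size below are just placeholders) boils down to this:

// Simplified sketch of the V4L1 mmap capture sequence used by the class.
#include <fcntl.h>
#include <unistd.h>
#include <cstdio>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev.h>   // V4L1 API: VIDIOCGMBUF, VIDIOCMCAPTURE, VIDIOCSYNC

int main()
{
    int fd = open("/dev/video0", O_RDWR);          // blocking open, as in the class
    if (fd < 0) { perror("open"); return -1; }

    struct video_mbuf mbuf;
    if (ioctl(fd, VIDIOCGMBUF, &mbuf) < 0) {       // query frame buffer layout
        perror("VIDIOCGMBUF"); return -1;
    }

    unsigned char *map = (unsigned char *)mmap(0, mbuf.size,
                                               PROT_READ | PROT_WRITE,
                                               MAP_SHARED, fd, 0);
    if (map == MAP_FAILED) { perror("mmap"); return -1; }

    struct video_mmap vm;
    vm.frame  = 0;                                 // capture into frame 0
    vm.width  = 640;
    vm.height = 480;
    vm.format = VIDEO_PALETTE_YUV420P;

    if (ioctl(fd, VIDIOCMCAPTURE, &vm) < 0) {      // queue the capture of one frame
        perror("VIDIOCMCAPTURE"); return -1;
    }

    int frame = 0;
    if (ioctl(fd, VIDIOCSYNC, &frame) < 0) {       // wait for the frame to complete
        perror("VIDIOCSYNC");                      // this is the call that fails for me
        return -1;
    }

    unsigned char *image = map + mbuf.offsets[frame];   // start of the captured data
    (void)image;                                   // conversion/display would go here

    munmap(map, mbuf.size);
    close(fd);
    return 0;
}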


Could anyone point out what might be causing this? Thanks!
leonchiong
Reply #1, posted on 2016-09-24
Here is the source code of the class:
#include "lvideo4l.h"

LVideo4l::LVideo4l(QByteArray devname, QObject *parent) :
    QObject(parent)
{
    // caution: devname is a by-value copy, so data() returns a pointer into a
    // buffer that is freed when the constructor returns (m_v4ldev.dev may dangle)
    m_v4ldev.dev = devname.data();
}

LVideo4l::~LVideo4l()
{
}

QByteArray LVideo4l::getDevName()
{
    QByteArray devname;
    devname.append(m_v4ldev.dev);
    return devname;
}

void LVideo4l::setDevIndex(int index)
{
    // caution: the QByteArray here is a temporary, so the pointer from data()
    // is no longer valid once this statement finishes
    m_v4ldev.dev = QByteArray(VIDEO_PATH(index)).data();
}

v4l_device LVideo4l::getV4lDevice()
{
    return m_v4ldev;
}

int LVideo4l::v4l_open()
{
    if((m_v4ldev.fd = open(m_v4ldev.dev, O_RDWR /*| O_NONBLOCK*/)) < 0) {
        qDebug() << "error: open" << m_v4ldev.dev << "failed." << strerror(errno);
        return -1;
    }
    // these two calls read the device information (capability and picture parameters)
    if(v4l_get_capability())
        return -1;
    if(v4l_get_picture())
        return -1;
    //    if(v4l_get_channels())
    //        return -1;

    return 0;
}

int LVideo4l::v4l_close()
{
    close(m_v4ldev.fd);
    return 0;
}

int LVideo4l::v4l_get_capability()
{
    if (ioctl(m_v4ldev.fd, VIDIOCGCAP, &(m_v4ldev.capability)) < 0) {
        qDebug() << "error: VIDIOCGCAP " << strerror(errno);
        return -1;
    }
    else
    {
        qDebug() << "m_v4ldev.capability"
                 << m_v4ldev.capability.name
                 << m_v4ldev.capability.audios
                 << m_v4ldev.capability.channels
                 << m_v4ldev.capability.maxheight
                 << m_v4ldev.capability.maxwidth
                 << m_v4ldev.capability.minheight
                 << m_v4ldev.capability.minwidth
                 << m_v4ldev.capability.type;
    }

    if (!(m_v4ldev.capability.type & VID_TYPE_CAPTURE)) {
        qDebug() << "Fatal: grab device does not handle capture. " << strerror(errno);
        return -1;
    }
    return 0;
}

int LVideo4l::v4l_get_picture()
{
    if (ioctl(m_v4ldev.fd, VIDIOCGPICT, &(m_v4ldev.picture)) < 0) {
        qDebug() << "error: VIDIOCGPICT" << strerror(errno);
        return -1;
    }
    else
    {
        qDebug() << "m_v4ldev.picture"
                 << m_v4ldev.picture.brightness
                 << m_v4ldev.picture.colour
                 << m_v4ldev.picture.contrast
                 << m_v4ldev.picture.depth
                 << m_v4ldev.picture.hue
                 << m_v4ldev.picture.palette
                 << m_v4ldev.picture.whiteness;

        //        if (m_v4ldev.picture.palette != VIDEO_PALETTE_YUV420P)
        m_v4ldev.picture.palette = VIDEO_PALETTE_YUV422;
        m_v4ldev.picture.depth= 16;
        v4l_set_picture(m_v4ldev.picture.brightness,
                        m_v4ldev.picture.hue,
                        m_v4ldev.picture.colour,
                        m_v4ldev.picture.contrast,
                        m_v4ldev.picture.whiteness,
                        m_v4ldev.picture.palette);

        qDebug() << "m_v4ldev.picture"
                 << m_v4ldev.picture.brightness
                 << m_v4ldev.picture.colour
                 << m_v4ldev.picture.contrast
                 << m_v4ldev.picture.depth
                 << m_v4ldev.picture.hue
                 << m_v4ldev.picture.palette
                 << m_v4ldev.picture.whiteness;
    }

    return 0;
}

int LVideo4l::v4l_get_channels()
{
    for(int i=0; i < m_v4ldev.capability.channels; i++){
        m_v4ldev.channel.channel = i;
        if(ioctl(m_v4ldev.fd, VIDIOCGCHAN, &(m_v4ldev.channel)) < 0)
        {
            qDebug() << "error: VIDIOCGCHAN" << strerror(errno);
            return -1;
        }
    }
    return 0;
}

int LVideo4l::v4l_set_picture(int br,int hue,int col,int cont,int white,int palette)
{
    if(br)
        m_v4ldev.picture.brightness = br;
    if(hue)
        m_v4ldev.picture.hue = hue;
    if(col)
        m_v4ldev.picture.colour = col;
    if(cont)
        m_v4ldev.picture.contrast = cont;
    if(white)
        m_v4ldev.picture.whiteness = white;
    if(palette)
        m_v4ldev.picture.palette = palette;
    if(ioctl(m_v4ldev.fd, VIDIOCSPICT, &(m_v4ldev.picture)) < 0)
    {
        qDebug() << "error: v4l set picture faild."<< strerror(errno);
        return -1;
    }
    return 0;
}

int LVideo4l::v4l_grab_picture(unsigned int size)
{
    if(read(m_v4ldev.fd, &(m_v4ldev.map), size) == 0)
    {
        qDebug() << "read v4ldev error, map is null.";
        return -1;
    }

    return 0;
}

int LVideo4l::v4l_get_mbuf()
{
    if (ioctl(m_v4ldev.fd, VIDIOCGMBUF, &(m_v4ldev.mbuf)) < 0) {
        m_v4ldev.picture.palette = VIDEO_PALETTE_YUV420P;
        m_v4ldev.picture.depth= 16;
        v4l_set_picture(m_v4ldev.picture.brightness,
                        m_v4ldev.picture.hue,
                        m_v4ldev.picture.colour,
                        m_v4ldev.picture.contrast,
                        m_v4ldev.picture.whiteness,
                        m_v4ldev.picture.palette);
        if (ioctl(m_v4ldev.fd, VIDIOCGMBUF, &(m_v4ldev.mbuf)) < 0) {
            qDebug() << "error: v4l get mbuf faild.";
            return -1;
        }
    }
    qDebug() << "mbuf.size" << m_v4ldev.mbuf.size
             << "mbuf.offsets" << m_v4ldev.mbuf.offsets
             << "mbuf.frames" << m_v4ldev.mbuf.frames;

    return 0;
}

int LVideo4l::v4l_mmap_init()
{
    if (v4l_get_mbuf() < 0)
        return -1;

    m_v4ldev.map = ( unsigned char*)mmap(0, m_v4ldev.mbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, m_v4ldev.fd, 0);
    if ((unsigned char*)-1 == m_v4ldev.map) {
        m_v4ldev.map = ( unsigned char*)mmap(0, m_v4ldev.mbuf.size, PROT_READ|PROT_WRITE, MAP_PRIVATE, m_v4ldev.fd, 0);

        if ((unsigned char*)-1 == m_v4ldev.map) {
            qDebug() << "error: v4l_mmap_init faild.";
            return -1;
        }
    }
    return 0;
}

int LVideo4l::v4l_grab_init(int width, int height)
{
    m_v4ldev.mmap.frame = m_v4ldev.mbuf.frames;

    m_v4ldev.mmap.width  = width;
    m_v4ldev.mmap.height = height;
    m_v4ldev.mmap.format = VIDEO_PALETTE_YUV420P;//m_v4ldev.picture.palette;
    m_v4ldev.frame_current = 0;
    m_v4ldev.frame_using[0] = FALSE;
    m_v4ldev.frame_using[1] = FALSE;
    m_v4ldev.frame_size = width * height * VIDEO_MAXFRAME;

    return v4l_grab_frame(0);
}

int LVideo4l::v4l_grab_frame(int frame)
{
    if (m_v4ldev.frame_using[frame]) {
        qDebug() << "warning: v4l_grab_frame frame" << frame << "is already used.";
        return -1;
    }

    m_v4ldev.mmap.frame = frame;
    frame = ioctl(m_v4ldev.fd, VIDIOCMCAPTURE, &(m_v4ldev.mmap));
    qDebug() << "ioctrl VIDIOCMCAPTURE value" << frame ;
    if (frame < 0) {
        qDebug() << "error: v4l_grab_frame faild."<< strerror(errno);
        return -1;
    }

    m_v4ldev.frame_using[m_v4ldev.mmap.frame] = TRUE;
    m_v4ldev.frame_current = m_v4ldev.mmap.frame;

    return 0;
}

int LVideo4l::v4l_grab_sync()
{
    if (ioctl(m_v4ldev.fd, VIDIOCSYNC, &(m_v4ldev.frame_current)) < 0) {
        qDebug() << "error: v4l_grab_sync faild."<< strerror(errno);
        return -1;
    }
    m_v4ldev.frame_using[m_v4ldev.frame_current] = FALSE;

    return 0;
}

unsigned char *LVideo4l::v4l_get_frame_adress()
{
    return (m_v4ldev.map + m_v4ldev.mbuf.offsets[m_v4ldev.frame_current]);
}
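
For completeness, this is roughly how the class is meant to be driven from the application side (a sketch for illustration only; the device node and 640x480 size are placeholders, not the values from my real program):

// Rough usage sketch of LVideo4l: open -> mmap -> capture -> sync -> read frame.
#include "lvideo4l.h"

int captureOneFrame()
{
    QByteArray devname("/dev/video0");            // placeholder device node
    LVideo4l cam(devname, 0);

    if (cam.v4l_open() < 0)                       // open() + VIDIOCGCAP/VIDIOCGPICT
        return -1;
    if (cam.v4l_mmap_init() < 0)                  // VIDIOCGMBUF + mmap of the buffers
        return -1;
    if (cam.v4l_grab_init(640, 480) < 0)          // issues VIDIOCMCAPTURE for frame 0
        return -1;
    if (cam.v4l_grab_sync() < 0)                  // VIDIOCSYNC: wait for frame 0
        return -1;

    unsigned char *yuv = cam.v4l_get_frame_adress();  // start of the captured YUV data
    (void)yuv;                                    // conversion/display would go here

    cam.v4l_close();
    return 0;
}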