This is the class's source code:
#include "lvideo4l.h"

#include <cstring>  // strdup
/// Constructs the wrapper and records the device node path.
///
/// Bug fix: the original stored devname.data() — a pointer into the
/// by-value QByteArray parameter, which is destroyed when the constructor
/// returns, leaving m_v4ldev.dev dangling. Duplicate the string instead.
LVideo4l::LVideo4l(QByteArray devname, QObject *parent) :
    QObject(parent)
{
    // Heap copy so the path outlives the parameter. The copy is one small
    // string per object and is intentionally not freed here.
    // TODO(review): free it in the destructor once ownership of
    // m_v4ldev.dev is confirmed to be exclusive to this class.
    m_v4ldev.dev = strdup(devname.constData());
}
// Destructor. Intentionally empty: the V4L file descriptor is NOT closed
// here — callers are expected to call v4l_close() themselves.
// NOTE(review): if v4l_open() succeeded and v4l_close() was never called,
// the descriptor leaks; confirm the intended ownership with callers before
// adding a close() here (fd is not visibly initialized to a sentinel, so an
// unconditional close could hit a garbage descriptor).
LVideo4l::~LVideo4l()
{
}
/// Returns a copy of the device node path currently stored in m_v4ldev.dev.
QByteArray LVideo4l::getDevName()
{
    return QByteArray(m_v4ldev.dev);
}
/// Points m_v4ldev.dev at the device node for the given index (the path is
/// produced by the VIDEO_PATH(index) macro from the header).
///
/// Bug fix: the original stored QByteArray(...).data(), a pointer into a
/// temporary QByteArray destroyed at the end of the full expression, so
/// m_v4ldev.dev dangled immediately. Duplicate the string instead.
void LVideo4l::setDevIndex(int index)
{
    // TODO(review): the previous string (if any) is not freed here because
    // its ownership is not guaranteed to be this class's heap copy; confirm
    // and free once all writers of m_v4ldev.dev use strdup.
    m_v4ldev.dev = strdup(QByteArray(VIDEO_PATH(index)).constData());
}
/// Returns a snapshot (by value) of the internal v4l_device state.
v4l_device LVideo4l::getV4lDevice()
{
    v4l_device snapshot = m_v4ldev;
    return snapshot;
}
/// Opens the capture device node and queries its capability and picture
/// settings. Returns 0 on success, -1 on any failure (details are logged).
int LVideo4l::v4l_open()
{
    m_v4ldev.fd = open(m_v4ldev.dev, O_RDWR /*| O_NONBLOCK*/);
    if (m_v4ldev.fd < 0) {
        qDebug() << "error: open" << m_v4ldev.dev << "failed." << strerror(errno);
        return -1;
    }
    // Query static device information right after opening.
    if (v4l_get_capability())
        return -1;
    if (v4l_get_picture())
        return -1;
    // if(v4l_get_channels())
    // return -1;
    return 0;
}
int LVideo4l::v4l_close()
{
close(m_v4ldev.fd);
return 0;
}
/// Queries the device capability block (VIDIOCGCAP) into
/// m_v4ldev.capability and verifies the device supports capture.
/// Returns 0 on success, -1 on failure.
int LVideo4l::v4l_get_capability()
{
    if (ioctl(m_v4ldev.fd, VIDIOCGCAP, &m_v4ldev.capability) < 0) {
        qDebug() << "error: VIDIOCGCAP " << strerror(errno);
        return -1;
    }
    // Dump what the driver reported.
    qDebug() << "m_v4ldev.capability"
             << m_v4ldev.capability.name
             << m_v4ldev.capability.audios
             << m_v4ldev.capability.channels
             << m_v4ldev.capability.maxheight
             << m_v4ldev.capability.maxwidth
             << m_v4ldev.capability.minheight
             << m_v4ldev.capability.minwidth
             << m_v4ldev.capability.type;
    // The grab calls in this class require capture capability.
    const bool canCapture = (m_v4ldev.capability.type & VID_TYPE_CAPTURE) != 0;
    if (!canCapture) {
        qDebug() << "Fatal: grab device does not handle capture. " << strerror(errno);
        return -1;
    }
    return 0;
}
/// Reads the current picture settings (VIDIOCGPICT), then forces the
/// palette to YUV422 at depth 16 and pushes the settings back to the
/// driver via v4l_set_picture(). Returns 0 on success, -1 if the initial
/// query fails (the set step's result is not checked, as before).
int LVideo4l::v4l_get_picture()
{
    if (ioctl(m_v4ldev.fd, VIDIOCGPICT, &m_v4ldev.picture) < 0) {
        qDebug() << "error: VIDIOCGPICT" << strerror(errno);
        return -1;
    }
    // Dump the settings as reported by the driver.
    qDebug() << "m_v4ldev.picture"
             << m_v4ldev.picture.brightness
             << m_v4ldev.picture.colour
             << m_v4ldev.picture.contrast
             << m_v4ldev.picture.depth
             << m_v4ldev.picture.hue
             << m_v4ldev.picture.palette
             << m_v4ldev.picture.whiteness;
    // Request the pixel format this class works with.
    // if (m_v4ldev.picture.palette != VIDEO_PALETTE_YUV420P)
    m_v4ldev.picture.palette = VIDEO_PALETTE_YUV422;
    // depth travels inside the struct, so set it before the write-back.
    m_v4ldev.picture.depth = 16;
    v4l_set_picture(m_v4ldev.picture.brightness,
                    m_v4ldev.picture.hue,
                    m_v4ldev.picture.colour,
                    m_v4ldev.picture.contrast,
                    m_v4ldev.picture.whiteness,
                    m_v4ldev.picture.palette);
    // Dump again so the log shows what was actually applied.
    qDebug() << "m_v4ldev.picture"
             << m_v4ldev.picture.brightness
             << m_v4ldev.picture.colour
             << m_v4ldev.picture.contrast
             << m_v4ldev.picture.depth
             << m_v4ldev.picture.hue
             << m_v4ldev.picture.palette
             << m_v4ldev.picture.whiteness;
    return 0;
}
/// Enumerates the input channels reported by the capability query, issuing
/// VIDIOCGCHAN for each index. Returns 0 on success, -1 on the first
/// failure. Note: only the last channel queried remains in m_v4ldev.channel.
int LVideo4l::v4l_get_channels()
{
    const int channelCount = m_v4ldev.capability.channels;
    for (int ch = 0; ch < channelCount; ++ch) {
        m_v4ldev.channel.channel = ch;
        if (ioctl(m_v4ldev.fd, VIDIOCGCHAN, &m_v4ldev.channel) < 0) {
            qDebug() << "error: VIDIOCGCHAN" << strerror(errno);
            return -1;
        }
    }
    return 0;
}
/// Overwrites the cached picture settings and pushes them to the driver
/// via VIDIOCSPICT. By convention an argument of 0 means "keep the current
/// value" — note this makes it impossible to explicitly set a field to 0.
/// Returns 0 on success, -1 on failure.
int LVideo4l::v4l_set_picture(int br, int hue, int col, int cont, int white, int palette)
{
    if (br != 0)
        m_v4ldev.picture.brightness = br;
    if (hue != 0)
        m_v4ldev.picture.hue = hue;
    if (col != 0)
        m_v4ldev.picture.colour = col;
    if (cont != 0)
        m_v4ldev.picture.contrast = cont;
    if (white != 0)
        m_v4ldev.picture.whiteness = white;
    if (palette != 0)
        m_v4ldev.picture.palette = palette;
    if (ioctl(m_v4ldev.fd, VIDIOCSPICT, &m_v4ldev.picture) < 0) {
        qDebug() << "error: v4l set picture faild."<< strerror(errno);
        return -1;
    }
    return 0;
}
/// Reads one frame of `size` bytes from the device into the buffer that
/// m_v4ldev.map points at. Returns 0 on success, -1 on failure.
///
/// Bug fixes vs. the original:
///  * read() was given &(m_v4ldev.map) — the address of the pointer
///    variable itself — so a frame-sized read overwrote the pointer and
///    whatever followed it in the struct. The destination must be the
///    buffer, m_v4ldev.map.
///  * read() returns -1 on error (0 only at end-of-stream); comparing the
///    result only against 0 missed real failures. Treat <= 0 as an error.
int LVideo4l::v4l_grab_picture(unsigned int size)
{
    if (read(m_v4ldev.fd, m_v4ldev.map, size) <= 0) {
        qDebug() << "read v4ldev error, map is null.";
        return -1;
    }
    return 0;
}
/// Queries the driver's mmap buffer layout (VIDIOCGMBUF) into
/// m_v4ldev.mbuf. If the first query fails, falls back to the YUV420P
/// palette (depth 16) and retries once. Returns 0 on success, -1 on failure.
int LVideo4l::v4l_get_mbuf()
{
    int rc = ioctl(m_v4ldev.fd, VIDIOCGMBUF, &m_v4ldev.mbuf);
    if (rc < 0) {
        // Some drivers reject the current palette; switch and retry once.
        m_v4ldev.picture.palette = VIDEO_PALETTE_YUV420P;
        m_v4ldev.picture.depth = 16;
        v4l_set_picture(m_v4ldev.picture.brightness,
                        m_v4ldev.picture.hue,
                        m_v4ldev.picture.colour,
                        m_v4ldev.picture.contrast,
                        m_v4ldev.picture.whiteness,
                        m_v4ldev.picture.palette);
        rc = ioctl(m_v4ldev.fd, VIDIOCGMBUF, &m_v4ldev.mbuf);
        if (rc < 0) {
            qDebug() << "error: v4l get mbuf faild.";
            return -1;
        }
    }
    qDebug() << "mbuf.size" << m_v4ldev.mbuf.size
             << "mbuf.offsets" << m_v4ldev.mbuf.offsets
             << "mbuf.frames" << m_v4ldev.mbuf.frames;
    return 0;
}
int LVideo4l::v4l_mmap_init()
{
if (v4l_get_mbuf() < 0)
return -1;
m_v4ldev.map = ( unsigned char*)mmap(0, m_v4ldev.mbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, m_v4ldev.fd, 0);
if ((unsigned char*)-1 == m_v4ldev.map) {
m_v4ldev.map = ( unsigned char*)mmap(0, m_v4ldev.mbuf.size, PROT_READ|PROT_WRITE, MAP_PRIVATE, m_v4ldev.fd, 0);
if ((unsigned char*)-1 == m_v4ldev.map) {
qDebug() << "error: v4l_mmap_init faild.";
return -1;
}
}
return 0;
}
/// Prepares the mmap capture parameters (geometry, format, frame
/// bookkeeping) and kicks off capture of frame 0.
/// Returns the result of the initial v4l_grab_frame(0) call.
int LVideo4l::v4l_grab_init(int width, int height)
{
    // Capture geometry and pixel format requested from the driver.
    m_v4ldev.mmap.frame = m_v4ldev.mbuf.frames;
    m_v4ldev.mmap.width = width;
    m_v4ldev.mmap.height = height;
    m_v4ldev.mmap.format = VIDEO_PALETTE_YUV420P;//m_v4ldev.picture.palette;
    // Double-buffer bookkeeping: no frame is in flight yet.
    m_v4ldev.frame_current = 0;
    m_v4ldev.frame_using[0] = FALSE;
    m_v4ldev.frame_using[1] = FALSE;
    // NOTE(review): width*height*VIDEO_MAXFRAME looks like it mixes a frame
    // count with a per-pixel byte factor — confirm the intended unit of
    // frame_size against its consumers.
    m_v4ldev.frame_size = width * height * VIDEO_MAXFRAME;
    return v4l_grab_frame(0);
}
/// Asks the driver to start capturing into mmap frame `frame`
/// (VIDIOCMCAPTURE — the capture itself is asynchronous).
/// Returns 0 on success, -1 if the frame is still marked in use or the
/// ioctl fails.
int LVideo4l::v4l_grab_frame(int frame)
{
    if (m_v4ldev.frame_using[frame]) {
        qDebug() << "warning: v4l_grab_frame frame" << frame << "is already used.";
        return -1;
    }
    m_v4ldev.mmap.frame = frame;
    // Keep the ioctl result in its own variable instead of recycling the
    // `frame` parameter, as the original did.
    const int rc = ioctl(m_v4ldev.fd, VIDIOCMCAPTURE, &m_v4ldev.mmap);
    qDebug() << "ioctrl VIDIOCMCAPTURE value" << rc;
    if (rc < 0) {
        qDebug() << "error: v4l_grab_frame faild."<< strerror(errno);
        return -1;
    }
    m_v4ldev.frame_using[m_v4ldev.mmap.frame] = TRUE;
    m_v4ldev.frame_current = m_v4ldev.mmap.frame;
    return 0;
}
/// Blocks until capture of the current frame completes (VIDIOCSYNC), then
/// marks that frame as reusable. Returns 0 on success, -1 on failure.
int LVideo4l::v4l_grab_sync()
{
    const int rc = ioctl(m_v4ldev.fd, VIDIOCSYNC, &m_v4ldev.frame_current);
    if (rc < 0) {
        qDebug() << "error: v4l_grab_sync faild."<< strerror(errno);
        return -1;
    }
    m_v4ldev.frame_using[m_v4ldev.frame_current] = FALSE;
    return 0;
}
/// Returns a pointer to the start of the current frame within the mmap'd
/// capture buffer. (The "adress" spelling is part of the public interface
/// and is kept as-is.)
unsigned char *LVideo4l::v4l_get_frame_adress()
{
    unsigned char *base = m_v4ldev.map;
    return base + m_v4ldev.mbuf.offsets[m_v4ldev.frame_current];
}