我在做基于qt下显示摄像头获取的信息,就是显示不出来,大家看看我错在哪个地方啊 ?
#include <qapplication.h>
#include <qpushbutton.h>
#include <qwidget.h>
#include <qcolor.h>
#include <qtimer.h>
#include <qframe.h>
#include <math.h>
#include <qimage.h>
#include <qpainter.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <linux/videodev.h>
#include <sys/ioctl.h>
#include <fcntl.h>
#include <linux/fb.h>
#include <sys/mman.h>
#include <string.h>
 
#define ERR_FRAME_BUFFER 1
#define ERR_VIDEO_OPEN  2
#define ERR_VIDEO_GCAP  3
#define ERR_VIDEO_GPIC  4
#define ERR_VIDEO_SPIC  5
#define ERR_SYNC     6
#define ERR_FRAME_USING   7
#define ERR_GET_FRAME     8
int screensize;
#define V4L_FILE "/dev/video0" 
#define DEFAULT_PALETTE VIDEO_PALETTE_RGB565
class MyWidget : public QWidget
{
//O_OBJECT
public:
    MyWidget( QWidget *parent=0, const char *name=0 );
    //unsigned char* get_image();
     //void get_image();
    int open_video();    
    int get_grab_frame();
    //void paintEvent(QPaintEvent *);
//protected:
    //virtual void paintEvent(QPaintEvent *);
    void paintEvent(QPaintEvent *);
private:
    QTimer *timer;
    QFrame *frame;
    //unsigned short* imageptr;
    unsigned short* imageptr;
    int dep;
    int pal;
    int width;
    int height;
    // FrameBuffer 信息
    struct fb_var_screeninfo vinfo;  // FrameBuffer屏幕可变的信息
    struct fb_fix_screeninfo finfo;  // FrameBuffer固定不变的信息
    //video4linux信息 
     int fd;
    struct video_capability  capability; 
    struct video_buffer  buffer;   
    struct video_picture  picture;  
    struct video_mmap  map;    
    struct video_mbuf  mbuf;        
    //unsigned short *pmap;
    unsigned char  *pmap;
    int frame_current;//what 's the frame number being captured currently?
    int frame_using[VIDEO_MAX_FRAME];//帧的状态没有采集还是等待结束?
    int screensize;
};
MyWidget::MyWidget( QWidget *parent, const char *name )
        : QWidget( parent, name )
{  
   dep=16;
   pal=VIDEO_PALETTE_RGB565;
   width=320;
   height=240;
   frame_current = 0;
   frame_using[0] = 0;
   frame_using[1] = 0;
   
   setCaption("MyJob");
   frame=new QFrame(this,"frame");
   frame->setBackgroundColor(black);
   frame->setGeometry(QRect(40,40,402,252));
   QTimer *timer=new QTimer(this,"timer");
   timer = new QTimer(this);
   connect(timer, SIGNAL(timeout()), SLOT(update()));
   timer->start(30);
   //QTimer::singleShot(50,this,SLOT(update()));
   printf("timer is using\n");
}
int MyWidget::get_grab_frame()
{
   
   if (frame_using[frame_current]) {
      fprintf(stderr, "get_grab_frame: frame %d is already used.\n", frame_current);
      return ERR_FRAME_USING;
   }
   map.frame = frame_current;
   /**
    Start Picture capture from this moment
   */
   if (ioctl(fd, VIDIOCMCAPTURE, &(map)) < 0) {
      perror("v4l_grab_frame");
      return ERR_GET_FRAME;
   }
   //置为采集忙状态
   frame_using[frame_current] = 1;
   
  if (ioctl(fd, VIDIOCSYNC, &(frame_current)) < 0) // 等待帧同步
   {  
      perror("v4l_grab_sync");
      return ERR_SYNC;
   }
  frame_using[frame_current] = 0 ;//采集完毕置0
  //return RIGHT_GRAP_FRAME;
 
}
int MyWidget::open_video()
{
  // 打开视频设备
       if ((fd = open(V4L_FILE, O_RDWR)) < 0)
  {
      perror("v4l_open:");
      return ERR_VIDEO_OPEN;
   }
    printf("=============Open Video Success=======================");
    
   // 获取设备
   if (ioctl(fd, VIDIOCGCAP, &(capability)) < 0)
   {
      perror("v4l_get_capability:");
      return ERR_VIDEO_GCAP;
   }
   printf("Camera found: %s,maxwidth:%d,maxheight:%d,minwidth:%d,minheight:%d \n",capability.name,capability.maxwidth,capability.maxheight,capability.minwidth,capability.minheight);
   printf("=============Get Device Success=======================");
  // 获取图象 
   if (ioctl(fd, VIDIOCGPICT, &(picture)) < 0)
   {
      perror("v4l_get_picture");
      return ERR_VIDEO_GPIC;
   }
      printf("=============Get Picture Success=======================");
      printf("=====Capture depth:%d,Palette:%d,brightness:%d,hue:%d,contrast:%d===============\n",picture.depth,picture.palette,picture.brightness,picture.hue,picture.contrast);
   // 设置图象
   picture.palette = pal;  // 调色板
   picture.depth = dep;   // 像素深度
 
   map.format =pal;
   if (ioctl(fd, VIDIOCSPICT, &(picture)) < 0)
   {
      perror("v4l_set_palette");
      return ERR_VIDEO_SPIC;
   }
   printf("=====Capture depth:%d,Palette:%d,brightness:%d,hue:%d,contrast:%d===============\n",picture.depth,picture.palette,picture.brightness,picture.hue,picture.contrast);
   //
   /*vd->map.width = width;   // width;
   vd->map.height = height;  // height;
   vd->map.format = vd->picture.palette;*/
   /*frame_current = 0;
   frame_using[0] = 0;
   frame_using[1] = 0;*/
   
   memset (&(mbuf), 0, sizeof(mbuf));
   // 获取摄像头缓冲区信息
   if (ioctl(fd, VIDIOCGMBUF, &(mbuf)) < 0)
   {
      perror("v4l_get_mbuf");
      return -1;
   }
   printf ("VIDIOCGMBUF size %d  frames %d  offets[0]=%d offsets[1]=%d\n",mbuf.size, mbuf.frames, mbuf.offsets[0], mbuf.offsets[1]);
   // 建立设备内存影射,将视频设备影射到内存空间 
   pmap = (unsigned char *)mmap(0, mbuf.size, PROT_READ|PROT_WRITE, MAP_SHARED, fd, 0);
   if ( pmap < 0)
   {
      perror("v4l_mmap_init:map");
      return -1;
   }
   map.width = width;   // width;
   map.height = height;  // height;
   map.format = picture.palette;
   printf("The video device was opened successfully.\n");
}
/*void MyWidget::get_image()
{
   //open_video();                                        
   imageptr = (unsigned short *)(pmap + mbuf.offsets[frame_current]);
   printf("imageptr is 0x%x",imageptr);
/*err:
 if(vd.fbfd)
  close(vd.fbfd);    // 关闭FrameBuffer设备
 
 if(vd.fd)
  close(vd.fd);
 exit(0);
 return 0;*/
//}*/
void MyWidget::paintEvent(QPaintEvent *)
{
    int x, y;
    int i = 0;
    QRgb *point;
    int r, g, b;
    int imgwidth=402;
    int imgheight=252;
    //创建该PaintEvent的操作对象paint
        //该对象为静态对象,不需要每次paint都创建一个QPainter
    static QPainter paint(frame);
         //读取内存内容,创建QImage对象
    QImage *image = new QImage();
    //该步很重要,设置标志
    //让QWidget在更新窗体时,不擦除原来的窗体
    //这样可以避免闪屏
    setWFlags(getWFlags() | Qt::WRepaintNoErase);
    
    //begin graping picture
    get_grab_frame();
    imageptr = (unsigned short *)(pmap + mbuf.offsets[frame_current]);
    //printf("imageptr is 0x%x",imageptr);
    
    frame_current ^= 1;//两帧采集不是0就是1
    if(image->create(imgwidth, imgheight, 32, 0, QImage::IgnoreEndian))
    {
        for(x = 0; x < imgheight; x++)
        {
            for(y = 0; y < imgwidth; y++)
            {
                r = (imageptr&0xf800)>>11;
                g = (imageptr&0x07e0)>>5;
                b = imageptr&0x001f;
                point = (QRgb *)image->scanLine(x)+y;
                *point = qRgb(r, g, b);
                i++;
                
            }
        }
    }
    //开始绘图  
    //paint.drawImage(0, 0, *image);
    paint.drawImage(QRect(40,34,402,252), *image);
    //printf("imageptr[0] is 0x%x",imageptr[0]);
    // paint.end();
    //释放空间
   /* if(vd->mbuf.size)
    {
        munmap(imageptr,vd->mbuf.size);
    }
    else if(imageptr)
    {
        free(imageptr);
    }
    delete image;
    image = NULL;*/
}
int main( int argc, char **argv )
{
    QApplication a( argc, argv );
    MyWidget *w=new MyWidget(0);
    w->setGeometry( 10, 20, 480, 320 );
    a.setMainWidget( w );
    w->show();
    w->open_video(); 
    //a.connect( &a, SIGNAL( lastWindowClosed() ), &a, SLOT( quit() ) );
    return a.exec();
}