首页 > 代码库 > 移植opencv到开发板,摄像头在开发板6410上的采集使用(2)

移植opencv到开发板,摄像头在开发板6410上的采集使用(2)

在虚拟机搭建好系统后,真正的使用才刚刚开始。

在使用摄像头的时候,首先看自己的摄像头插上去是显示jpeg的还是yuv的

YUV 格式的摄像头需要实现 UVC→QImage→IplImage 这样的转换流程才能使用;JPEG 格式的摄像头很多人不会用,以为没用,其实最好用,一点都不卡;YUV 的会有点卡。

 

我用的也是yuv以前朋友用的jpeg的摄像头。

代码是用的网上的一个代码修改的

1. [文件] videodevice.h ?

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
#ifndef VIDEODEVICE_H
#define VIDEODEVICE_H#include <string.h>
#include <stdlib.h>
#include <errno.h>
#include <fcntl.h>
 #include <sys/ioctl.h>
#include <sys/mman.h>#include <asm/types.h>
#include <linux/videodev2.h>#include <QString>
#include <QObject>#define CLEAR(x) memset(&(x), 0, sizeof(x))class VideoDevice :public QObject
{
    Q_OBJECTpublic:
    VideoDevice(QString dev_name);
    //VideoDevice();    int open_device();
    intclose_device();
    intinit_device();
    intstart_capturing();
    intstop_capturing();
    intuninit_device();
    intget_frame(void**, size_t*);
    intunget_frame();
 private:
    intinit_mmap();
  
    structbuffer
    {
        void* start;
        size_tlength;
    };
    QString dev_name;
    intfd;
    buffer* buffers;
    unsignedint n_buffers;
    intindex;
  
signals:
    voiddisplay_error(QString);
  
};
 #endif // VIDEODEVICE_H

2. [文件] videodevice.cpp 

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
#include "videodevice.h"
// Construct an unopened wrapper for the given device node path; all
// state is put in its "not yet initialised" configuration.
VideoDevice::VideoDevice(QString dev_name)
    : dev_name(dev_name)
    , fd(-1)        // no file descriptor until open_device()
    , buffers(NULL) // allocated later by init_mmap()
    , n_buffers(0)
    , index(-1)     // no frame currently dequeued
{
}
 intVideoDevice::open_device()
{
    fd = open(dev_name.toStdString().c_str(), O_RDWR/*|O_NONBLOCK*/, 0);
   // fd = open(dev_name.toStdString().c_str(), O_RDWR|O_NONBLOCK, 0);
    if(-1 == fd)
    {
        emit display_error(tr("open: %1").arg(QString(strerror(errno))));
        return-1;
    }
    return0;
}
 intVideoDevice::close_device()
{
    if(-1 == close(fd))
    {
        emit display_error(tr("close: %1").arg(QString(strerror(errno))));
        return-1;
    }
    return0;
}
 intVideoDevice::init_device()
{
    v4l2_capability cap;
    v4l2_cropcap cropcap;
    v4l2_crop crop;
    v4l2_format fmt;
  
    if(-1 == ioctl(fd, VIDIOC_QUERYCAP, &cap))
    {
        if(EINVAL ==errno)
        {
            emit display_error(tr("%1 is no V4l2 device").arg(dev_name));
        }
        else
        {
            emit display_error(tr("VIDIOC_QUERYCAP: %1").arg(QString(strerror(errno))));
        }
        return-1;
    }
  
    if(!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
    {
        emit display_error(tr("%1 is no video capture device").arg(dev_name));
        return-1;
    }
  
    if(!(cap.capabilities & V4L2_CAP_STREAMING))
    {
        emit display_error(tr("%1 does not support streaming i/o").arg(dev_name));
        return-1;
    }
  
    CLEAR(cropcap);
  
    cropcap.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  
    if(0 == ioctl(fd, VIDIOC_CROPCAP, &cropcap))
    {
        CLEAR(crop);
        crop.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        crop.c = cropcap.defrect;
  
        if(-1 == ioctl(fd, VIDIOC_S_CROP, &crop))
        {
            if(EINVAL ==errno)
            {//                emit display_error(tr("VIDIOC_S_CROP not supported"));            }
            else
            {
                emit display_error(tr("VIDIOC_S_CROP: %1").arg(QString(strerror(errno))));
                return-1;
            }
        }
    }
    else
    {
        emit display_error(tr("VIDIOC_CROPCAP: %1").arg(QString(strerror(errno))));
        return-1;
    }
  
    CLEAR(fmt);
  
    fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    fmt.fmt.pix.width = 640;
    fmt.fmt.pix.height = 480;
    fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV;
    fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;
  
    if(-1 == ioctl(fd, VIDIOC_S_FMT, &fmt))
    {
        emit display_error(tr("VIDIOC_S_FMT").arg(QString(strerror(errno))));
        return-1;
    }
  
    if(-1 == init_mmap())
    {
        return-1;
    }
  
    return0;
}
 intVideoDevice::init_mmap()
{
    v4l2_requestbuffers req;
    CLEAR(req);
  
    req.count = 4;
    req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    req.memory = V4L2_MEMORY_MMAP;
  
    if(-1 == ioctl(fd, VIDIOC_REQBUFS, &req))
    {
        if(EINVAL ==errno)
        {
            emit display_error(tr("%1 does not support memory mapping").arg(dev_name));
            return-1;
        }
        else
        {
            emit display_error(tr("VIDIOC_REQBUFS %1").arg(QString(strerror(errno))));
            return-1;
        }
    }
  
    if(req.count < 2)
    {
        emit display_error(tr("Insufficient buffer memory on %1").arg(dev_name));
        return-1;
    }
  
    buffers = (buffer*)calloc(req.count,sizeof(*buffers));
  
    if(!buffers)
    {
        emit display_error(tr("out of memory"));
        return-1;
    }
  
    for(n_buffers = 0; n_buffers < req.count; ++n_buffers)
    {
        v4l2_buffer buf;
        CLEAR(buf);
  
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory = V4L2_MEMORY_MMAP;
        buf.index = n_buffers;
  
        if(-1 == ioctl(fd, VIDIOC_QUERYBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QUERYBUF: %1").arg(QString(strerror(errno))));
            return-1;
        }
  
        buffers[n_buffers].length = buf.length;
        buffers[n_buffers].start =
                mmap(NULL,// start anywhere                      buf.length,
                     PROT_READ | PROT_WRITE,
                     MAP_SHARED,
                     fd, buf.m.offset);
  
        if(MAP_FAILED == buffers[n_buffers].start)
        {
            emit display_error(tr("mmap %1").arg(QString(strerror(errno))));
            return-1;
        }
    }
    return0;
  
}
 intVideoDevice::start_capturing()
{
    unsignedint i;
    for(i = 0; i < n_buffers; ++i)
    {
        v4l2_buffer buf;
        CLEAR(buf);
  
        buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        buf.memory =V4L2_MEMORY_MMAP;
        buf.index = i;//        fprintf(stderr, "n_buffers: %d\n", i);
        if(-1 == ioctl(fd, VIDIOC_QBUF, &buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return-1;
        }
    }
  
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  
    if(-1 == ioctl(fd, VIDIOC_STREAMON, &type))
    {
        emit display_error(tr("VIDIOC_STREAMON: %1").arg(QString(strerror(errno))));
        return-1;
    }
    return0;
}
 intVideoDevice::stop_capturing()
{
    v4l2_buf_type type;
    type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
  
    if(-1 == ioctl(fd, VIDIOC_STREAMOFF, &type))
    {
        emit display_error(tr("VIDIOC_STREAMOFF: %1").arg(QString(strerror(errno))));
        return-1;
    }
    return0;
}
 intVideoDevice::uninit_device()
{
    unsignedint i;
    for(i = 0; i < n_buffers; ++i)
    {
        if(-1 == munmap(buffers[i].start, buffers[i].length))
        {
            emit display_error(tr("munmap: %1").arg(QString(strerror(errno))));
            return-1;
        }
  
    }
    free(buffers);
    return0;
}
 intVideoDevice::get_frame(void**frame_buf, size_t* len)
{
    v4l2_buffer queue_buf;
    CLEAR(queue_buf);
  
    queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
    queue_buf.memory = V4L2_MEMORY_MMAP;
  
    if(-1 == ioctl(fd, VIDIOC_DQBUF, &queue_buf))
    {
        switch(errno)
        {
        caseEAGAIN: //            perror("dqbuf");            return -1;
        caseEIO:
            return-1 ;
        default:
            emit display_error(tr("VIDIOC_DQBUF: %1").arg(QString(strerror(errno))));
            return-1;
        }
    }
  
    *frame_buf = buffers[queue_buf.index].start;
    *len = buffers[queue_buf.index].length;
    index = queue_buf.index;
  
    return0;
  
}
 intVideoDevice::unget_frame()
{
    if(index != -1)
    {
        v4l2_buffer queue_buf;
        CLEAR(queue_buf);
  
        queue_buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
        queue_buf.memory = V4L2_MEMORY_MMAP;
        queue_buf.index = index;
  
        if(-1 == ioctl(fd, VIDIOC_QBUF, &queue_buf))
        {
            emit display_error(tr("VIDIOC_QBUF: %1").arg(QString(strerror(errno))));
            return-1;
        }
        return0;
    }
    return-1;
}

3. [文件] processImage.h 

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
#ifndef PROCESSIMAGE_H
#define PROCESSIMAGE_H#include <QtGui>
#include "videodevice.h"class ProcessImage :public QWidget
{
    Q_OBJECTpublic:
    ProcessImage(QWidget *parent=0);
    ~ProcessImage();
 private:
    QPainter *painter;
    QLabel *label;
    QImage *frame;
    //QPixmap *frame;    QTimer *timer;
    intrs;
    uchar *pp;
    uchar * p;
    unsignedint len;
    intconvert_yuv_to_rgb_pixel(inty, int u, int v);
    intconvert_yuv_to_rgb_buffer(unsigned char*yuv, unsigned char*rgb, unsigned intwidth, unsigned intheight);
    VideoDevice *vd;
 privateslots:
    voidpaintEvent(QPaintEvent *);
    voiddisplay_error(QString err);
  
  
};
 #endif

4. [文件] processImage.cpp

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
#include <QtGui>
#include "processImage.h"
#include "videodevice.h"

// C standard headers wrapped for C linkage (harmless here; kept from
// the original structure).  Fixed: the braces had been fused onto the
// #include directive lines, which breaks preprocessing.
extern "C"
{
#include <stdio.h>
#include <stdlib.h>
}
ProcessImage::ProcessImage(QWidget *parent):QWidget(parent)
{
    pp = (unsignedchar *)malloc(640 * 480/*QWidget::width()*QWidget::height()*/* 3 * sizeof(char));
    painter =new QPainter(this);
    frame =new QImage(pp,640,480,QImage::Format_RGB888);
   // frame = new QPixmap(640,320);    label = newQLabel();
    vd =new VideoDevice(tr("/dev/video0"));
  
    connect(vd, SIGNAL(display_error(QString)),this,SLOT(display_error(QString)));
    rs = vd->open_device();
    if(-1==rs)
    {
        QMessageBox::warning(this,tr("error"),tr("open /dev/dsp error"),QMessageBox::Yes);
        vd->close_device();
    }
  
    rs = vd->init_device();
    if(-1==rs)
    {
        QMessageBox::warning(this,tr("error"),tr("init failed"),QMessageBox::Yes);
        vd->close_device();
    }
  
    rs = vd->start_capturing();
    if(-1==rs)
    {
        QMessageBox::warning(this,tr("error"),tr("start capture failed"),QMessageBox::Yes);
        vd->close_device();
    }
  
    if(-1==rs)
    {
        QMessageBox::warning(this,tr("error"),tr("get frame failed"),QMessageBox::Yes);
        vd->stop_capturing();
    }
  
    timer =new QTimer(this);
    connect(timer,SIGNAL(timeout()),this,SLOT(update()));
    timer->start(30);
  
    QHBoxLayout *hLayout =new QHBoxLayout();
    hLayout->addWidget(label);
    setLayout(hLayout);
    setWindowTitle(tr("Capture"));
}
  
// Tear down the capture pipeline in reverse order of construction:
// stop streaming, unmap the buffers, then close the device node.
ProcessImage::~ProcessImage()
{
    rs = vd->stop_capturing();
    rs = vd->uninit_device();
    rs = vd->close_device();
}
 voidProcessImage::paintEvent(QPaintEvent *)
{
    rs = vd->get_frame((void**)&p,&len);
    convert_yuv_to_rgb_buffer(p,pp,640,480/*QWidget::width(),QWidget::height()*/);
    frame->loadFromData((uchar *)pp,/*len*/640 * 480 * 3 *sizeof(char));
 //    painter->begin(this);
//    painter->drawImage(0,0,*frame);
//    painter->end();
//    rs = vd->unget_frame();   // frame->load("./img3.jpg");
    label->setPixmap(QPixmap::fromImage(*frame,Qt::AutoColor));
   // label->show();    rs = vd->unget_frame();
   // label->drawFrame();
    //    QPixmap *pixImage = new QPixmap();
//    pixImage->loadFromData((uchar *)pp,sizeof(pp),0,Qt::AutoColor);
//    QPainter painter(this);
//    painter.begin(this);
//    painter.drawPixmap(0,0,QWidget::width(),QWidget::height(),*pixImage);
//    painter.end();}
 voidProcessImage::display_error(QString err)
{
    QMessageBox::warning(this,tr("error"), err,QMessageBox::Yes);
}
 /*yuv格式转换为rgb格式*/int ProcessImage::convert_yuv_to_rgb_buffer(unsignedchar *yuv, unsignedchar *rgb, unsignedint width, unsignedint height)
{
 unsignedint in, out = 0;
 unsignedint pixel_16;
 unsignedchar pixel_24[3];
 unsignedint pixel32;
 inty0, u, y1, v;
 for(in = 0; in < width * height * 2; in += 4) {
  pixel_16 =
   yuv[in + 3] << 24 |
   yuv[in + 2] << 16 |
   yuv[in + 1] <<  8 |
   yuv[in + 0];
  y0 = (pixel_16 & 0x000000ff);
  u  = (pixel_16 & 0x0000ff00) >>  8;
  y1 = (pixel_16 & 0x00ff0000) >> 16;
  v  = (pixel_16 & 0xff000000) >> 24;
  pixel32 = convert_yuv_to_rgb_pixel(y0, u, v);
  pixel_24[0] = (pixel32 & 0x000000ff);
  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
  rgb[out++] = pixel_24[0];
  rgb[out++] = pixel_24[1];
  rgb[out++] = pixel_24[2];
  pixel32 = convert_yuv_to_rgb_pixel(y1, u, v);
  pixel_24[0] = (pixel32 & 0x000000ff);
  pixel_24[1] = (pixel32 & 0x0000ff00) >> 8;
  pixel_24[2] = (pixel32 & 0x00ff0000) >> 16;
  rgb[out++] = pixel_24[0];
  rgb[out++] = pixel_24[1];
  rgb[out++] = pixel_24[2];
 }
 return0;
}
 intProcessImage::convert_yuv_to_rgb_pixel(inty, int u, int v)
{
 unsignedint pixel32 = 0;
 unsignedchar *pixel = (unsignedchar *)&pixel32;
 intr, g, b;
 r = y + (1.370705 * (v-128));
 g = y - (0.698001 * (v-128)) - (0.337633 * (u-128));
 b = y + (1.732446 * (u-128));
 if(r > 255) r = 255;
 if(g > 255) g = 255;
 if(b > 255) b = 255;
 if(r < 0) r = 0;
 if(g < 0) g = 0;
 if(b < 0) b = 0;
 pixel[0] = r * 220 / 256;
 pixel[1] = g * 220 / 256;
 pixel[2] = b * 220 / 256;
 returnpixel32;
} /*yuv格式转换为rgb格式*/

5. [文件] main.cpp 

1
2
3
4
5
6
7
8
9
10
11
12
#include <QtGui>
#include "processImage.h"

// Entry point: create the capture widget at 640x480 and run the Qt
// event loop until the window is closed.
int main(int argc, char *argv[])
{
    QApplication app(argc, argv);
    ProcessImage process;
    process.resize(640, 480);
    process.show();

    return app.exec();
}

可以复制也可以到我的资源里去找有的我上传了字库的移植,还有这个代码。

先在虚拟机上跑起来一般没问题 记得设备号在虚拟机为/dev/video0 至于虚拟机用摄像头看我别的博文

跑好了再交叉编译记得改设备号为/dev/video2具体的看你自己插拔摄像头在dev里是哪个。

然后有的人直接可以使用了,但是有的一堆问题的我就是

第一个问题:s3c-fimc: invalid target size

把这句话添加进去就没有问题了
fmt.fmt.pix.priv=1;

这行代码加在 videodevice.cpp 中设置 fmt.fmt.pix 各字段的那段代码处(VIDIOC_S_FMT 调用之前),附近都是类似的赋值语句。

第二个问题就是:segmentation fault(段错误)

我够倒霉的全遇到了 解决花了一周。最后没办法我单步调试的

在 in=155644 的时候就会出现段错误。我检查了缓冲区大小的计算,发现问题出在 640*480 的分辨率上,容易造成内存越界;改成 320*240 之后 in 不会再超过 155644,问题就解决了。

当时还有点小激动呢。图片小了你可以用opencv再放大的。

最后开发板图像出来了 这里不拍照随便拿个把

需要的到我资源里下载代码 字库什么的

移植opencv到开发板,摄像头在开发板6410上的采集使用(2)