#include "yuvcapture.h"
#define WIDTH 640
#define HEIGHT 480
// Bind the capture object to an already-opened V4L2 device descriptor.
// Ownership of the descriptor transfers to this object (closed in the dtor).
YuvCapture::YuvCapture(int fd)
    : fd(fd)
{
}
// Tear down the capture pipeline: stop streaming, unmap/free the buffers,
// and close the device descriptor.
// NOTE: UninitCapture() already free()s framebuf; the extra
// free(this->framebuf) that used to follow it was a double free — removed.
YuvCapture::~YuvCapture()
{
StopCapture();
UninitCapture();   // unmaps the buffers and frees framebuf
close(fd);
}
// Request the driver's capture buffers and memory-map them into user space
// (V4L2_MEMORY_MMAP streaming I/O).
// Returns 0 on success, -1 on failure.
// (Name kept for source compatibility; "InitMmap" was presumably intended.)
int YuvCapture::InitMmep()
{
struct v4l2_requestbuffers req;
struct v4l2_buffer buf;
memset(&req, 0, sizeof(struct v4l2_requestbuffers));
memset(&buf, 0, sizeof(struct v4l2_buffer));
req.count = 4;                         // ask the driver for 4 buffers
req.memory = V4L2_MEMORY_MMAP;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
// Ask the driver to allocate kernel-side buffers.
if (camera_ioctl(VIDIOC_REQBUFS, &req) == -1)
{
perror("fail to request buffer");
return -1;   // was: fell through and kept going after a failed request
}
// The driver may grant fewer buffers than requested.
if (req.count < 2)
{
printf("Insufficient buffer memory\n");
return -1;
}
cout << "frame buffer request done" << endl;
// User-space bookkeeping (start pointer + length) for each mapped buffer.
framebuf = (video_buf_t *)calloc(req.count, sizeof(video_buf_t));
if (!framebuf)
{
printf("frambuf err calloc");
return -1;
}
// unsigned loop index: req.count is an unsigned field (__u32)
for (unsigned int i = 0; i < req.count; i++)
{
buf.index = i;
// Query each buffer's offset/length for mmap().
if (camera_ioctl(VIDIOC_QUERYBUF, &buf) == -1)
{
perror("VIDIOC_QUERYBUF failed!");   // perror appends its own newline
return -1;
}
// Map the kernel buffer into our address space.
framebuf[i].length = buf.length;
framebuf[i].start = mmap(NULL, buf.length,
PROT_READ | PROT_WRITE,
MAP_SHARED, fd, buf.m.offset);
if (framebuf[i].start == MAP_FAILED)
{
perror("mmap failed!");
return -1;
}
}
return 0;   // was: fell off the end of a non-void function (UB)
}
// Probe and configure the camera: verify capture/streaming capabilities,
// list supported pixel formats, set WIDTHxHEIGHT YUYV (packed 4:2:2)
// interlaced capture, set the frame interval, then map the buffers.
// Returns 0 on success, -1 on failure.
// (Name kept for source compatibility; "InitCapture" was presumably intended.)
int YuvCapture::InitCaputer()
{
if(fd == -1) return -1;
// Query device identity and capabilities.
v4l2_capability cap;
memset(&cap, 0, sizeof(cap));
if (-1 == camera_ioctl(VIDIOC_QUERYCAP, &cap))
{
perror("ictol cap!");
return -1;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE))
{
fprintf(stderr, "is no video capture device\n");
return -1;
}
if (!(cap.capabilities & V4L2_CAP_STREAMING))
{
fprintf(stderr, "does not support streaming i/o\n");
return -1;
}
// Print camera information.
printf("\nVIDOOC_QUERYCAP\n");
printf("the camera driver is %s\n", cap.driver);
printf("the camera card is %s\n", cap.card);
printf("the camera bus info is %s\n", cap.bus_info);
printf("the version is %d\n", cap.version);
// Enumerate the pixel formats the camera supports.
v4l2_fmtdesc fmtdesc;
memset(&fmtdesc, 0, sizeof(fmtdesc));   // was passed to ioctl uninitialized
fmtdesc.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmtdesc.index = 0;
printf("Support format:\n");
while (camera_ioctl(VIDIOC_ENUM_FMT, &fmtdesc) != -1)
{
printf("\t%d.%s\n", fmtdesc.index + 1, fmtdesc.description);
fmtdesc.index++;
}
// Set the capture pixel format.
v4l2_format fmt;
memset(&fmt, 0, sizeof(fmt));           // was passed to ioctl uninitialized
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = WIDTH;
fmt.fmt.pix.height = HEIGHT;
fmt.fmt.pix.pixelformat = V4L2_PIX_FMT_YUYV; // packed YUV 4:2:2
fmt.fmt.pix.field = V4L2_FIELD_INTERLACED;   // interlaced scan
if (camera_ioctl(VIDIOC_S_FMT, &fmt) < 0)
{
printf("VIDIOC_S_FMT\n");
return -1;
}
// Read the format back to verify what the driver actually applied
// (drivers may adjust width/height/format).
if (camera_ioctl(VIDIOC_G_FMT, &fmt) < 0)
{
printf("VIDIOC_G_FMT\n");   // was mislabelled as VIDIOC_S_FMT
return -1;
}
printf("fmt.type:\t\t%d\n", fmt.type);
printf("pix.pixelformat:\t%c%c%c%c\n", fmt.fmt.pix.pixelformat & 0xFF, (fmt.fmt.pix.pixelformat >> 8) & 0xFF, (fmt.fmt.pix.pixelformat >> 16) & 0xFF, (fmt.fmt.pix.pixelformat >> 24) & 0xFF);
printf("pix.height:\t\t%d\n", fmt.fmt.pix.height);
printf("pix.width:\t\t%d\n", fmt.fmt.pix.width);
printf("pix.field:\t\t%d\n", fmt.fmt.pix.field);
// Set the frame interval. timeperframe is numerator/denominator SECONDS
// per frame, so 10/10 requests 1 fps — NOTE(review): confirm this is the
// intended rate (1/30 would request 30 fps).
struct v4l2_streamparm parm;
memset(&parm, 0, sizeof(parm));         // was passed to ioctl uninitialized
parm.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
parm.parm.capture.timeperframe.numerator = 10;
parm.parm.capture.timeperframe.denominator = 10;
// use the EINTR-safe wrapper like every other call in this class
if (camera_ioctl(VIDIOC_S_PARM, &parm) == -1)
{
perror("VIDIOC_S_PARM failed");
return -1;   // was `return false` (== 0), which reads as success
}
printf("init camera success!\n");
// Map the driver buffers and propagate the result (was ignored, and the
// function then fell off the end of a non-void return — UB).
return InitMmep();
}
// Queue all mapped buffers back to the driver, then start streaming.
// Returns 0 on success, -1 on failure.
int YuvCapture::StartCapture()
{
unsigned int i;
v4l2_buffer buf;
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
// Enqueue every buffer. The hard-coded 4 matches req.count requested in
// InitMmep() — NOTE(review): would be safer to store the granted count.
for (i = 0; i < 4; i++)
{
buf.index = i;
if (camera_ioctl(VIDIOC_QBUF, &buf) < 0)
{
perror("VIDIOC_QBUF err");   // was mislabelled as VIDIOC_DQBUF
return -1;
}
}
// Start the capture stream.
if (camera_ioctl(VIDIOC_STREAMON, &type) < 0)
{
printf("VIDIOC_STREAMON\n");
return -1;
}
cout << "satart capture....." << endl;
return 0;   // was: fell off the end of a non-void function (UB)
}
// Stop the capture stream.
// Returns 0 on success, -1 if VIDIOC_STREAMOFF failed.
// (Previously returned -1 unconditionally, so callers could never tell
// success from failure.)
int YuvCapture::StopCapture()
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
if (camera_ioctl(VIDIOC_STREAMOFF, &type) < 0)
{
perror("VIDIOC_STREAMOFF");
return -1;
}
printf("Stop caputer...\n");
return 0;
}
// Dequeue one filled buffer, convert it from packed YUYV (4:2:2) to planar
// YUV420 into outBuf (caller provides WIDTH*HEIGHT*3/2 bytes), then requeue
// the buffer to the driver.
// Returns 1 on success, 0 when no frame is ready (EAGAIN), -1 on error.
int YuvCapture::ReadOneFrame(unsigned char *outBuf)
{
v4l2_buffer buf;
memset(&buf, 0, sizeof(struct v4l2_buffer));
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
buf.memory = V4L2_MEMORY_MMAP;
if (camera_ioctl(VIDIOC_DQBUF, &buf) < 0)
{
switch (errno)
{
case EAGAIN:
return 0;   // non-blocking fd: no frame available yet
case EIO:
/* Could ignore EIO, see spec. */
/* fall through */
default:
return -1;
}
}
// Convert the dequeued frame for the caller.
Yuv422ToYuv420((unsigned char *)framebuf[buf.index].start, outBuf, WIDTH, HEIGHT);
// Hand the buffer back to the driver for reuse.
if (camera_ioctl(VIDIOC_QBUF, &buf) < 0)
{
printf("VIDIOC_QBUF ERROR\n");   // was mislabelled as VIDIOC_DQBUF
return -1;
}
return 1;   // was: fell off the end of a non-void function (UB)
}
// Unmap the driver buffers and release the user-space bookkeeping array.
// Safe to call when nothing was ever mapped. Returns 1.
int YuvCapture::UninitCapture()
{
// Guard: InitMmep() may have failed (or never run), leaving framebuf NULL.
if (framebuf == NULL)
{
return 1;
}
unsigned int i;
for (i = 0; i < 4; i++)
{
if (munmap(framebuf[i].start, framebuf[i].length) < 0)
{
printf("munmap\n");
break;
}
}
printf("ummp buf....\n");
free(framebuf);
framebuf = NULL;   // prevent a double free if called again / from the dtor
return 1;
}
// Convert one packed YUYV (YUV 4:2:2, byte order Y0 U0 Y1 V0 ...) frame into
// planar I420 (YUV 4:2:0): full-size Y plane, then quarter-size U plane, then
// quarter-size V plane. Vertical chroma subsampling is done by taking U
// samples from even rows only and V samples from odd rows only.
// width must be even (YUYV packs pixels in pairs). Returns 1.
int YuvCapture::Yuv422ToYuv420(unsigned char *yuv422, unsigned char *yuv420, int width, int height)
{
    const int lumaSize = width * height;
    unsigned char *yDst = yuv420;
    unsigned char *uDst = yuv420 + lumaSize;
    unsigned char *vDst = yuv420 + lumaSize + lumaSize / 4;

    // Luma: every second byte of the packed stream is a Y sample.
    for (int p = 0; p < lumaSize; ++p)
    {
        yDst[p] = yuv422[p * 2];
    }

    // Chroma U: one row of width/2 samples per pair of source rows.
    for (int row = 0; row < height; row += 2)
    {
        const unsigned char *src = yuv422 + row * width * 2;
        for (int col = 0; col < width / 2; ++col)
        {
            *uDst++ = src[col * 4 + 1];
        }
    }

    // Chroma V: taken from the odd source rows.
    for (int row = 1; row < height; row += 2)
    {
        const unsigned char *src = yuv422 + row * width * 2;
        for (int col = 0; col < width / 2; ++col)
        {
            *vDst++ = src[col * 4 + 3];
        }
    }
    return 1;
}
// ioctl() wrapper that transparently retries when the call is interrupted
// by a signal (EINTR). Returns the final ioctl result; errno is preserved
// for the caller on failure.
int YuvCapture::camera_ioctl(int request, void *arg)
{
    for (;;)
    {
        const int ret = ioctl(fd, request, arg);
        if (ret >= 0 || errno != EINTR)
        {
            return ret;
        }
        // interrupted by a signal — retry the request
    }
}
Linux(Ubuntu)V4L2采集YUV422转YUV420,ALSA采集pcm、QT播放音频QOpenGL显示
需积分: 50 175 浏览量
2020-03-07
14:31:58
上传
评论
收藏 12KB ZIP 举报
Jaygee-
- 粉丝: 29
- 资源: 2
最新资源
- 论文(最终)_20240430235101.pdf
- 基于python编写的Keras深度学习框架开发,利用卷积神经网络CNN,快速识别图片并进行分类
- 最全空间计量实证方法(空间杜宾模型和检验以及结果解释文档).txt
- 5uonly.apk
- 蓝桥杯Python组的历年真题
- 2023-04-06-项目笔记 - 第一百十九阶段 - 4.4.2.117全局变量的作用域-117 -2024.04.30
- 2023-04-06-项目笔记 - 第一百十九阶段 - 4.4.2.117全局变量的作用域-117 -2024.04.30
- 前端开发技术实验报告:内含4四实验&实验报告
- Highlight Plus v20.0.1
- 林周瑜-论文.docx
资源上传下载、课程学习等过程中有任何疑问或建议,欢迎提出宝贵意见哦~我们会及时处理!
点击此处反馈