#include "widget.h"
#include "ui_widget.h"
class VideoAudioEncode videoaudioencode_0;   //Global object holding the selected camera device
Widget::Widget(QWidget *parent)
: QWidget(parent)
, ui(new Ui::Widget)
{
ui->setupUi(this);
//Cab (driver's cabin) camera
//Worker object
videoRead_WorkClass_0=new VideoReadThread_0;
videoRead_Workthread_0=new QThread;
//Connect the camera capture signal so frames are displayed in the main thread in real time
connect(videoRead_WorkClass_0,SIGNAL(VideoDataOutput(QImage)),this,SLOT(VideoDataDisplay_0(QImage)));
//Camera initialization: triggers the worker's run() slot
connect(this,SIGNAL(Init_VideoAudio_WorkClass_0()),videoRead_WorkClass_0,SLOT(run()));
//Stop video capture
connect(this,SIGNAL(Stop_AudioVideo0_work_0()),videoRead_WorkClass_0,SLOT(stop()));
//Move the worker object to the child thread
videoRead_WorkClass_0->moveToThread(videoRead_Workthread_0);
//Update the device list
UpdateVideoAudiodDevice(ui->comboBox_video_0,ui->plainTextEdit_log_0);
QTimer::singleShot(100, this, SLOT(update()));
}
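/*
 * The VideoDataDisplay_0(QImage) slot connected above is not shown in this section.
 * A minimal sketch (an assumption, not the original implementation) would simply
 * forward each captured frame to the face detector, which also displays it:
 *
 * void Widget::VideoDataDisplay_0(QImage image)
 * {
 *     if(!image.isNull())
 *         opencv_face(image);   //detect faces and display the annotated frame
 * }
 */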
Widget::~Widget()
{
delete ui;
}
//Face detection code
void Widget::opencv_face(QImage qImage)
{
QTime time;
time.start();
//Cascade classifier for face detection
CascadeClassifier face_cascade;
//Load the classifier file
if( !face_cascade.load("C:/OpenCV_3.4.7/OpenCV-MinGW-Build-OpenCV-3.4.7/etc/haarcascades/haarcascade_frontalface_alt.xml") )
{
Log_Display_0("Failed to load classifier haarcascade_frontalface_alt.xml.\n");
return;
}
Mat frame=QImage2cvMat(qImage);
cvtColor( frame, frame, COLOR_BGR2GRAY );//Convert to grayscale for detection
std::vector<Rect> faces;
//Frontal face detection
face_cascade.detectMultiScale(frame,faces,1.1,3,0,Size(50,50),Size(500,500));
Log_Display_0(tr("Elapsed: %1 ms, faces: %2\n").arg(time.elapsed()).arg(faces.size()));
ui->lcdNumber->display((int)faces.size());
for ( size_t i = 0; i < faces.size(); i++ )
{
//Extract the face region before drawing the markers on the frame
Mat face_roi = frame(faces[i]).clone();
#if 1
Point center(faces[i].x + faces[i].width/2, faces[i].y + faces[i].height/2);
ellipse(frame, center, Size( faces[i].width/2, faces[i].height/2 ), 0, 0, 360, Scalar( 255, 0, 255 ), 4 );
rectangle(frame,
Point(faces[i].x, faces[i].y),
Point(faces[i].x + faces[i].width-1, faces[i].y + faces[i].height-1),
Scalar(255, 255, 255), 3, 8, 0);
#endif
/*Display the detected face on the widget*/
ui->label_face_display->setPixmap(QPixmap::fromImage(Mat2QImage(face_roi)));
}
/*Display the annotated frame on the widget*/
QImage display_image=Mat2QImage(frame);
ui->label_display->setPixmap(QPixmap::fromImage(display_image));
}
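/*
 * Performance note: the cascade above is reloaded from disk on every frame. A common
 * alternative (sketch only, assuming a function-local static is acceptable here) is
 * to load it once and reuse it:
 *
 * static CascadeClassifier &frontalFaceCascade()
 * {
 *     static CascadeClassifier cascade;   //loaded on first use only
 *     if(cascade.empty())
 *         cascade.load("C:/OpenCV_3.4.7/OpenCV-MinGW-Build-OpenCV-3.4.7/etc/haarcascades/haarcascade_frontalface_alt.xml");
 *     return cascade;
 * }
 */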
Mat Widget::QImage2cvMat(QImage image)
{
Mat mat;
switch(image.format())
{
case QImage::Format_ARGB32:
case QImage::Format_RGB32:
case QImage::Format_ARGB32_Premultiplied:
// 4-channel image; the Mat shares memory with the QImage buffer
mat = Mat(image.height(), image.width(), CV_8UC4, (void*)image.constBits(), image.bytesPerLine());
break;
case QImage::Format_RGB888:
mat = Mat(image.height(), image.width(), CV_8UC3, (void*)image.constBits(), image.bytesPerLine());
cvtColor(mat, mat, COLOR_RGB2BGR);// QImage stores RGB, OpenCV expects BGR
break;
case QImage::Format_Indexed8:
// 8-bit single-channel (grayscale/indexed) image
mat = Mat(image.height(), image.width(), CV_8UC1, (void*)image.constBits(), image.bytesPerLine());
break;
default:
break;
}
return mat;
}
QImage Widget::Mat2QImage(const Mat& mat)
{
// 8-bits unsigned, NO. OF CHANNELS = 1
if(mat.type() == CV_8UC1)
{
QImage image(mat.cols, mat.rows, QImage::Format_Indexed8);
// Set the color table (used to translate colour indexes to qRgb values)
image.setColorCount(256);
for(int i = 0; i < 256; i++)
{
image.setColor(i, qRgb(i, i, i));
}
// Copy input Mat
uchar *pSrc = mat.data;
for(int row = 0; row < mat.rows; row ++)
{
uchar *pDest = image.scanLine(row);
memcpy(pDest, pSrc, mat.cols);
pSrc += mat.step;
}
return image;
}
// 8-bits unsigned, NO. OF CHANNELS = 3
else if(mat.type() == CV_8UC3)
{
// Copy input Mat
const uchar *pSrc = (const uchar*)mat.data;
// Create QImage with same dimensions as input Mat
QImage image(pSrc, mat.cols, mat.rows, mat.step, QImage::Format_RGB888);
return image.rgbSwapped();
}
else if(mat.type() == CV_8UC4)
{
// Copy input Mat
const uchar *pSrc = (const uchar*)mat.data;
// Create QImage with same dimensions as input Mat
QImage image(pSrc, mat.cols, mat.rows, mat.step, QImage::Format_ARGB32);
return image.copy();
}
else
{
return QImage();
}
}
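/*
 * Memory-ownership note (not in the original code): QImage2cvMat() only wraps the
 * QImage buffer, so the source QImage must outlive the returned Mat, while
 * Mat2QImage() returns an independent copy. A hypothetical round-trip helper inside
 * Widget would therefore be safe to use like this:
 *
 * QImage Widget::roundTrip(QImage src)
 * {
 *     Mat m = QImage2cvMat(src);   //shares memory with src
 *     return Mat2QImage(m);        //deep copy, independent of src
 * }
 */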
//Display the detection result
void Widget::show_face(IplImage* img)
{
uchar *imgData=(uchar *)img->imageData;
QImage my_image=QImage(imgData,img->width,img->height,img->widthStep,QImage::Format_RGB888);
my_image=my_image.rgbSwapped(); //Convert BGR order to RGB
QPixmap my_pix; //Pixmap used for display
my_pix.convertFromImage(my_image);
/*Display on the widget*/
ui->label_display->setPixmap(my_pix);
}
//Start capture
void Widget::on_pushButton_Start_clicked()
{
//Use the camera currently selected in the combo box
videoaudioencode_0.camera=video_dev_list.at(ui->comboBox_video_0->currentIndex());
Stop_VideoAudioEncode_0(true);
Start_VideoAudioEncode_Thread_0();
}
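/*
 * Start_VideoAudioEncode_Thread_0() is not shown in this section. Judging from the
 * objects created in the constructor, a sketch of it (an assumption) would start the
 * worker thread and emit the init signal so run() executes inside that thread:
 *
 * void Widget::Start_VideoAudioEncode_Thread_0()
 * {
 *     videoRead_Workthread_0->start();        //start the worker thread
 *     emit Init_VideoAudio_WorkClass_0();     //queued call to VideoReadThread_0::run()
 * }
 */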
//Destructor
VideoReadThread_0::~VideoReadThread_0()
{
}
//Stop video capture
void VideoReadThread_0::stop()
{
qDebug()<<"Stop video capture--stop";
if(camera)
{
camera->stop();
delete camera;
camera=nullptr;
}
if(m_pProbe)
{
delete m_pProbe;
m_pProbe=nullptr;
}
}
//Worker entry slot, executed in the child thread via the Init_VideoAudio_WorkClass_0() signal
void VideoReadThread_0::run()
{
stop();
Camear_Init();
qDebug()<<"Camera has started capturing data";
}
void VideoReadThread_0::Camear_Init()
{
/*Create the camera object for the selected device*/
camera = new QCamera(videoaudioencode_0.camera);
m_pProbe = new QVideoProbe;
if(m_pProbe != nullptr)
{
m_pProbe->setSource(camera); //Attach the probe to the camera; returns true on success
connect(m_pProbe, SIGNAL(videoFrameProbed(QVideoFrame)),this, SLOT(slotOnProbeFrame(QVideoFrame)), Qt::QueuedConnection);
}
/*Set the camera capture mode to frame (video) capture*/
//camera->setCaptureMode(QCamera::CaptureStillImage); //Use this setting when running on Linux
camera->setCaptureMode(QCamera::CaptureVideo);//Use this setting when running on Android
/*Start the camera*/
camera->start();
/*Set the camera capture frame rate and resolution*/
QCameraViewfinderSettings settings;
// settings.setPixelFormat(QVideoFrame::Format_YUYV); //Set the pixel format; Android only supports NV21
settings.setResolution(QSize(VIDEO_WIDTH,VIDEO_HEIGHT)); //Set the camera resolution
camera->setViewfinderSettings(settings);
}
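/*
 * slotOnProbeFrame(QVideoFrame) is connected above but not shown in this section.
 * A minimal sketch (an assumption about how frames reach the UI) maps the frame,
 * wraps it in a QImage and emits VideoDataOutput(QImage) for the main thread. For
 * YUV pixel formats (e.g. YUYV/NV21) imageFormatFromPixelFormat() returns an invalid
 * format, which is where a manual converter such as yuyv_to_rgb() below comes in.
 *
 * void VideoReadThread_0::slotOnProbeFrame(const QVideoFrame &frame)
 * {
 *     QVideoFrame f(frame);
 *     if(!f.map(QAbstractVideoBuffer::ReadOnly))
 *         return;
 *     QImage image(f.bits(), f.width(), f.height(), f.bytesPerLine(),
 *                  QVideoFrame::imageFormatFromPixelFormat(f.pixelFormat()));
 *     emit VideoDataOutput(image.copy());   //deep copy before unmapping
 *     f.unmap();
 * }
 */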
/*
Function: convert YUYV (YUV 4:2:2) data to RGB
Parameters:
unsigned char *yuv_buffer: source YUYV data
unsigned char *rgb_buffer: RGB output buffer
int iWidth,int iHeight : image width and height
*/
void yuyv_to_rgb(unsigned char *yuv_buffer,unsigned char *rgb_buffer,int iWidth,int iHeight)
{
int x;
int z=0;
unsigned char *ptr = rgb_buffer;
unsigned char *yuyv= yuv_buffer;
for (x = 0; x < iWidth*iHeight; x++)
{
int r, g, b;
int y, u, v;
if (!z)
y = yuyv[0] << 8;
else
y = yuyv[2] << 8;
u = yuyv[1] - 128;
v = yuyv[3] - 128;
r = (y + (359 * v)) >> 8;
g = (y - (88 * u) -