// Cab (driver's cabin) camera — capture + encode source, instance 0.
#include "audio_video_encode_0.h"
class Thread_VideoAudioEncode_0 thread_VideoAudioEncode_0; // encoder thread object
class VideoAudioEncode videoaudioencode_0;                 // shared capture/encode state (queues, mutexes, flags)
unsigned char *video_yuv420p_buff=nullptr;//[VIDEO_WIDTH*VIDEO_HEIGHT*3/2];  capture-side YUV420P frame (allocated elsewhere)
unsigned char *video_yuv420p_buff_temp=nullptr;//[VIDEO_WIDTH*VIDEO_HEIGHT*3/2];  encoder-side copy of the frame
unsigned char *rgb_buffer=nullptr;                         // packed RGB24 staging buffer for desktop grabs
static unsigned char audio_write_buffer[2048];             // one encoder audio frame: 1024 samples x int16 x 1 channel
static qint32 audio_write_cnt=0;                           // read offset into the current audio_buffer chunk
static QByteArray audio_buffer;                            // one chunk of PCM data popped from the capture queue
/* Fetch one frame of audio data (2048 bytes) for the encoder.
 * Pulls PCM chunks from the shared capture queue as needed and returns
 * ost->tmp_frame with its pts advanced, or nullptr when the stream has
 * reached STREAM_DURATION or recording was stopped (run_flag == 0). */
static AVFrame *get_audio_frame(OutputStream *ost)
{
    AVFrame *frame = ost->tmp_frame;
    int16_t *q = (int16_t*)frame->data[0];
    /* Check whether we should generate more frames -- detects end of recording. */
    if(av_compare_ts(ost->next_pts, ost->enc->time_base,
                     STREAM_DURATION, (AVRational){ 1, 1 }) >= 0)
    {
        return nullptr;
    }
    /* Fill audio_write_buffer with exactly 2048 bytes.
     * BUGFIX: the old code memcpy'd a fixed 2048 (or 2048-temp_cnt) bytes out of a
     * freshly dequeued QByteArray without checking its size, reading past the end
     * of the chunk whenever the capture side queued fewer than 2048 bytes.  This
     * loop copies only what each chunk actually holds and keeps dequeuing until
     * the frame is complete. */
    int filled = 0;                                        // bytes copied into audio_write_buffer so far
    while(filled < 2048)
    {
        int avail = audio_buffer.size() - audio_write_cnt; // unread bytes left in the current chunk
        if(avail > 0)
        {
            int take = (avail < 2048 - filled) ? avail : (2048 - filled);
            memcpy(audio_write_buffer + filled, audio_buffer.data() + audio_write_cnt, take);
            audio_write_cnt += take;
            filled += take;
        }
        else
        {
            /* Current chunk exhausted: wait until the capture thread queues more data. */
            while(videoaudioencode_0.audio_data_queue.isEmpty())
            {
                QThread::msleep(5);
                if(videoaudioencode_0.run_flag==0)return nullptr;  // recording stopped
            }
            /* Consumer side: pop one PCM chunk from the shared queue. */
            videoaudioencode_0.audio_encode_mutex.lock();
            audio_buffer=videoaudioencode_0.audio_data_queue.dequeue();
#ifdef PCM_DATA_SAVE
            pcm_data->write(audio_buffer);                 // optional raw-PCM dump for debugging
#endif
            videoaudioencode_0.audio_encode_mutex.unlock();
            audio_write_cnt=0;                             // start reading the new chunk from its beginning
        }
    }
    /* nb_samples: number of samples (per channel) described by this frame.
     * With 1024 samples x int16 x 1 channel this is exactly the 2048 bytes filled above. */
    memcpy(q,audio_write_buffer,frame->nb_samples*sizeof(int16_t)*ost->enc->channels);
    frame->pts = ost->next_pts;
    ost->next_pts += frame->nb_samples;
    return frame;
}
/*
 Prepare one image for the encoder.
 YUV422 memory footprint = w * h * 2
 YUV420 memory footprint = width * height * 3/2
 Waits for the capture thread to publish a fresh YUV420P frame, snapshots it
 under the mutex, then copies the Y/U/V planes into the encoder frame.
*/
static void fill_yuv_image(AVFrame *pict, int frame_index,int width, int height)
{
    const unsigned int luma_bytes = width * height;      // Y plane size
    const unsigned int chroma_bytes = luma_bytes / 4;    // U and V plane sizes (4:2:0)
    // NOTE(review): wait() has no predicate re-check, so a spurious wakeup would
    // copy a stale frame -- confirm wakeAll() is the only wake source.
    videoaudioencode_0.video_encode_mutex.lock();
    videoaudioencode_0.video_WaitConditon.wait(&videoaudioencode_0.video_encode_mutex);
    memcpy(video_yuv420p_buff_temp,video_yuv420p_buff,videoaudioencode_0.image_sizeo);
    videoaudioencode_0.video_encode_mutex.unlock();
    // Split the contiguous YUV420P snapshot into the three AVFrame planes.
    const unsigned char *src = video_yuv420p_buff_temp;
    memcpy(pict->data[0], src, luma_bytes);   src += luma_bytes;
    memcpy(pict->data[1], src, chroma_bytes); src += chroma_bytes;
    memcpy(pict->data[2], src, chroma_bytes);
}
/* Produce the next video frame for the encoder, or nullptr once the
 * configured STREAM_DURATION has elapsed (end of recording). */
static AVFrame *get_video_frame(OutputStream *ost)
{
    AVCodecContext *codec_ctx = ost->enc;
    /* Check whether we should generate more frames -- detects end of recording. */
    const AVRational one_second = { 1, 1 };
    if(av_compare_ts(ost->next_pts, codec_ctx->time_base, STREAM_DURATION, one_second) >= 0)
        return nullptr;
    /* The encoder may keep an internal reference to the frame we hand it;
     * make sure we do not overwrite data it is still using. */
    if (av_frame_make_writable(ost->frame) < 0)
        exit(1);
    /* Grab the latest captured image into the frame buffer.
     * (DTS = decode timestamp, PTS = presentation timestamp.) */
    fill_yuv_image(ost->frame, ost->next_pts, codec_ctx->width, codec_ctx->height);
    ost->frame->pts = ost->next_pts++;
    // Watermarking of the video frame happens on the capture side.
    return ost->frame;
}
// Destructor: intentionally empty -- the owned Qt objects (camera, probe,
// timer) are released in stop(), not here.
VideoReadThread_0::~VideoReadThread_0()
{
}
// Stop video capture: release the camera, the frame probe, and the
// desktop-grab timer.  Safe to call repeatedly -- every pointer is
// reset to nullptr after deletion.
void VideoReadThread_0::stop()
{
    qDebug()<<"停止视频采集--stop";
    if(camera)
    {
        camera->stop();
        delete camera;
        camera=nullptr;
    }
    if(m_pProbe)
    {
        delete m_pProbe;
        m_pProbe=nullptr;
    }
    if(timer)
    {
        timer->stop();
        delete timer;
        timer=nullptr;  // BUGFIX: was left dangling, causing a double-delete on the next stop()
    }
}
// Thread entry point: (re)start capture.  Desktop mode grabs the screen on a
// timer; camera mode initializes the QCamera pipeline.
void VideoReadThread_0::run()
{
    stop();  // tear down any previous capture objects before starting again
    if(videoaudioencode_0.desktop_flag)
    {
        qDebug()<<"桌面开始采集数据";
        // Desktop (screen) capture path.
        screen = QGuiApplication::primaryScreen();
        timer=new QTimer;
        connect(timer, SIGNAL(timeout()), this, SLOT(update()));
        timer->start(70); // capture period in ms (~14 fps)
        // NOTE(review): run() returns here without calling exec(); the timeout
        // is then serviced by whichever thread owns this object's event loop --
        // confirm this is the intended threading model.
    }
    else
    {
        Camear_Init();
        qDebug()<<"摄像头开始采集数据";
    }
}
//定时采集桌面的数据
void VideoReadThread_0::update()
{
QPixmap pixmap=screen->grabWindow(0); //获取当前屏幕的图像
QImage image=pixmap.toImage();
image=image.scaled(VIDEO_WIDTH,VIDEO_HEIGHT,Qt::KeepAspectRatio, Qt::SmoothTransformation);
//绘制图片水印
QDateTime dateTime(QDateTime::currentDateTime());
//时间效果: 2020-03-05 16:25::04 周一
QString qStr="";
qStr+=dateTime.toString("yyyy-MM-dd hh:mm:ss ddd");
QPainter pp(&image);
QPen pen = QPen(Qt::white);
pp.setPen(pen);
pp.drawText(QPointF(0,20),qStr);
//提取RGB数据
unsigned char *p=rgb_buffer;
for(int i=0;i<image.height();i++)
{
for(int j=0;j<image.width();j++)
{
QRgb rgb=image.pixel(j,i);
*p++=qRed(rgb);
*p++=qGreen(rgb);
*p++=qBlue(rgb);
}
}
videoaudioencode_0.video_encode_mutex.lock();
RGB24_TO_YUV420(rgb_buffer,image.width(),image.height(),video_yuv420p_buff);
videoaudioencode_0.video_encode_mutex.unlock();
videoaudioencode_0.video_WaitConditon.wakeAll();
emit VideoDataOutput(image.scaled(640,480,Qt::KeepAspectRatio, Qt::SmoothTransformation)); //发送信号
}
void VideoReadThread_0::Camear_Init()
{
/*创建摄像头对象,根据选择的摄像头打开*/
camera = new QCamera(videoaudioencode_0.camera);
m_pProbe = new QVideoProbe;
if(m_pProbe != nullptr)
{
m_pProbe->setSource(camera); // Returns true, hopefully.
connect(m_pProbe, SIGNAL(videoFrameProbed(QVideoFrame)),this, SLOT(slotOnProbeFrame(QVideoFrame)), Qt::QueuedConnection);
}
/*配置摄像头捕 QCamera *camera;
QVideoProbe *m_pProbe;获模式为帧捕获模式*/
//camera->setCaptureMode(QCamera::CaptureStillImage); //如果在Linux系统下运行就这样设置
camera->setCaptureMode(QCamera::CaptureVideo);//如果在android系统下运行就这样设置
/*启动摄像头*/
camera->start();
/*设置摄像头的采集帧率和分辨率*/
QCameraViewfinderSettings settings;
// settings.setPixelFormat(QVideoFrame::Format_YUYV); //设置像素格式 Android上只支持NV21格式
settings.setResolution(QSize(VIDEO_WIDTH,VIDEO_HEIGHT)); //设置摄像头的分辨率
camera->setViewfinderSettings(settings);
//获取摄像头支持的分辨率、帧率等参数
#if 0
int i=0;
QList<QCameraViewfin
- 1
- 2
- 3
前往页