#include "videothread.h"
#include <QImage>
#include <QTime>
#include <QCoreApplication>
#include <QEventLoop>
#include <QDebug>
// Nothing to initialize up front: all decoding state is local to run().
VideoThread::VideoThread() = default;
void VideoThread::run()
{
AVFormatContext *pFormatCtx;
int i, videoindex;
AVCodecContext *pCodecCtx;
AVCodec *pCodec;
AVFrame *pFrame, *pFrameRGB;
unsigned char *out_buffer;
AVPacket *packet;
int ret, got_picture;
struct SwsContext *img_convert_ctx;
AVDictionary *optionsDict = NULL;
av_dict_set(&optionsDict, "rtsp_transport", "tcp", 0);
av_dict_set(&optionsDict, "stimeout", "2000000", 0);
av_dict_set(&optionsDict, "framerate", "20", 0);
std::string strResp = "rtsp://admin:admin@192.168.1.89/11";
//char filepath[] = "D:\\QtProjects\\video\\1.mp4";
//初始化编解码库
av_register_all();//创建AVFormatContext对象,与码流相关的结构。
avdevice_register_all();
avformat_network_init();
pFormatCtx = avformat_alloc_context();
//初始化pFormatCtx结构
if (avformat_open_input(&pFormatCtx, strResp.c_str(), NULL, &optionsDict) != 0)
//if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0)
{
qDebug("Couldn't open input stream.\n");
return ;
}
//获取音视频流数据信息
if (avformat_find_stream_info(pFormatCtx, NULL) < 0){
qDebug("Couldn't find stream information.\n");
return ;
}
videoindex = -1;
//nb_streams视音频流的个数,这里当查找到视频流时就中断了。
for (i = 0; i < pFormatCtx->nb_streams; i++)
if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO){
videoindex = i;
break;
}
if (videoindex == -1){
qDebug("Didn't find a video stream.\n");
return ;
}
//获取视频流编码结构
pCodecCtx = pFormatCtx->streams[videoindex]->codec;
//查找解码器
pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
if (pCodec == NULL){
qDebug("Codec not found.\n");
return ;
}
//用于初始化pCodecCtx结构
if (avcodec_open2(pCodecCtx, pCodec, NULL) < 0){
qDebug("Could not open codec.\n");
return ;
}
//创建帧结构,此函数仅分配基本结构空间,图像数据空间需通过av_malloc分配
pFrame = av_frame_alloc();
pFrameRGB = av_frame_alloc();
//创建动态内存,创建存储图像数据的空间
//av_image_get_buffer_size获取一帧图像需要的大小
out_buffer = (unsigned char *)av_malloc(av_image_get_buffer_size(AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1));
av_image_fill_arrays(pFrameRGB->data, pFrameRGB->linesize, out_buffer,
AV_PIX_FMT_RGB32, pCodecCtx->width, pCodecCtx->height, 1);
packet = (AVPacket *)av_malloc(sizeof(AVPacket));
//初始化img_convert_ctx结构
img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
pCodecCtx->width, pCodecCtx->height, AV_PIX_FMT_RGB32, SWS_BICUBIC, NULL, NULL, NULL);
//获取帧率
AVStream *stream=pFormatCtx->streams[0];
int frame_rate=stream->avg_frame_rate.num/stream->avg_frame_rate.den;//每秒多少帧
qDebug() << "frame_rate:" << frame_rate;
//av_read_frame读取一帧未解码的数据
while (av_read_frame(pFormatCtx, packet) >= 0){
//如果是视频数据
if (packet->stream_index == videoindex){
//解码一帧视频数据
ret = avcodec_decode_video2(pCodecCtx, pFrame, &got_picture, packet);
if (ret < 0){
qDebug("Decode Error.\n");
return ;
}
if (got_picture){
sws_scale(img_convert_ctx, (const unsigned char* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height,
pFrameRGB->data, pFrameRGB->linesize);
QImage img((uchar*)pFrameRGB->data[0],pCodecCtx->width,pCodecCtx->height,QImage::Format_RGB32);
QImage image = img.copy(); //把图像复制一份 传递给界面显示
emit sendGetFrame(image); //发送信号
}
}
av_free_packet(packet);
msleep(0.01);
//Delay(10);
}
sws_freeContext(img_convert_ctx);
av_frame_free(&pFrameRGB);
av_frame_free(&pFrame);
avcodec_close(pCodecCtx);
avformat_close_input(&pFormatCtx);
}
// NOTE(review): removed stray web-scrape footer text ("评论0" — "0 comments") that was not code and broke compilation.