#include "ffmpeg.h"
FFmpegThread::FFmpegThread(QObject *parent) : QThread(parent)
{
    setObjectName("FFmpegThread");
    stopped = false;
    isPlay = false;
    frameFinish = false;
    videoWidth = 0;
    videoHeight = 0;
    oldWidth = 0;
    oldHeight = 0;
    videoStreamIndex = -1;
    audioStreamIndex = -1;
    url = "rtsp://172.23.100.88:554/1";

    buffer = NULL;
    avPacket = NULL;
    avFrame = NULL;
    avFrame2 = NULL;
    avFrame3 = NULL;
    avFormatContext = NULL;
    videoCodec = NULL;
    audioCodec = NULL;
    swsContext = NULL;

    options = NULL;
    videoDecoder = NULL;
    audioDecoder = NULL;

    //register and initialize the library; this only needs to happen once per application
    FFmpegThread::initlib();
}
//only needs to be initialized once per application
void FFmpegThread::initlib()
{
    static QMutex mutex;
    QMutexLocker locker(&mutex);

    static bool isInit = false;
    if (!isInit) {
        //register all file formats and codecs available in the library
        //(no longer required since FFmpeg 4.0, where av_register_all is deprecated)
        av_register_all();
        //register all devices, mainly needed to support playback from local cameras
#ifdef ffmpegdevice
        avdevice_register_all();
#endif
        //initialize network support; must be called before opening network streams
        avformat_network_init();

        isInit = true;
        qDebug() << TIMEMS << "init ffmpeg lib ok" << " version:" << FFMPEG_VERSION;

#if 0
        //print the names of all supported decoders
        QStringList listCodeName;
        AVCodec *code = av_codec_next(NULL);
        while (code != NULL) {
            listCodeName << code->name;
            code = av_codec_next(code);
        }
        qDebug() << TIMEMS << listCodeName;
#endif
    }
}
bool FFmpegThread::init()
{
    //set the options before opening the stream, e.g. probe duration / timeout / maximum delay
    //set the buffer size; increase it for 1080p streams
    av_dict_set(&options, "buffer_size", "425984", 0);
    //open with UDP transport; replace "udp" with "tcp" to use TCP instead
    av_dict_set(&options, "rtsp_transport", "udp", 0);
    //set the timeout before the connection is dropped, in microseconds; 3000000 means 3 seconds
    av_dict_set(&options, "stimeout", "3000000", 0);
    //set the maximum delay, in microseconds; 1000000 means 1 second
    av_dict_set(&options, "max_delay", "1000000", 0);
    //let FFmpeg pick the number of decoding threads automatically
    av_dict_set(&options, "threads", "auto", 0);

    //open the video stream
    avFormatContext = avformat_alloc_context();
    int result = avformat_open_input(&avFormatContext, url.toStdString().data(), NULL, &options);
    if (result < 0) {
        qDebug() << TIMEMS << "open input error" << url;
        return false;
    }

    //free the options dictionary
    if (options != NULL) {
        av_dict_free(&options);
    }

    //read the stream information
    result = avformat_find_stream_info(avFormatContext, NULL);
    if (result < 0) {
        qDebug() << TIMEMS << "find stream info error";
        return false;
    }
    //----------video stream section starts; the if (1) is just a marker to make the code foldable----------
    if (1) {
        videoStreamIndex = av_find_best_stream(avFormatContext, AVMEDIA_TYPE_VIDEO, -1, -1, &videoDecoder, 0);
        if (videoStreamIndex < 0) {
            qDebug() << TIMEMS << "find video stream index error";
            return false;
        }

        //get the video stream
        AVStream *videoStream = avFormatContext->streams[videoStreamIndex];

        //get the decoder for the video stream with the legacy API, or pick one by name
        //videoCodec = videoStream->codec;
        //videoDecoder = avcodec_find_decoder(videoCodec->codec_id);
        //videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
        //if (videoDecoder == NULL) {
        //    qDebug() << TIMEMS << "video decoder not found";
        //    return false;
        //}

        //get the decoder for the video stream from its codec parameters, or pick one by name
        AVCodecParameters *codecpr = videoStream->codecpar;
        videoDecoder = avcodec_find_decoder(codecpr->codec_id);
        //videoDecoder = avcodec_find_decoder_by_name("h264_qsv");
        if (videoDecoder == NULL) {
            qDebug() << TIMEMS << "video decoder not found";
            return false;
        }

        videoCodec = avcodec_alloc_context3(nullptr);
        avcodec_parameters_to_context(videoCodec, codecpr);

        //enable faster decoding
        videoCodec->lowres = videoDecoder->max_lowres;
        videoCodec->flags2 |= AV_CODEC_FLAG2_FAST;

        //open the video decoder
        result = avcodec_open2(videoCodec, videoDecoder, NULL);
        if (result < 0) {
            qDebug() << TIMEMS << "open video codec error";
            return false;
        }

        //get the resolution
        videoWidth = videoCodec->width;
        videoHeight = videoCodec->height;
        //bail out if the width or height could not be determined
        if (videoWidth == 0 || videoHeight == 0) {
            qDebug() << TIMEMS << "find width height error";
            return false;
        }

        //QString videoInfo = QString("video stream info -> index: %1 decoder: %2 format: %3 duration: %4 s resolution: %5*%6")
        //        .arg(videoStreamIndex).arg(videoDecoder->name).arg(avFormatContext->iformat->name)
        //        .arg((avFormatContext->duration) / 1000000).arg(videoWidth).arg(videoHeight);
        //qDebug() << TIMEMS << videoInfo;
    }
    //----------video stream section ends----------

    //----------audio stream section starts; marker to make the code foldable----------
    //----------audio stream section ends----------
    //pre-allocate the packet and frames
    avPacket = av_packet_alloc();
    avFrame = av_frame_alloc();
    avFrame2 = av_frame_alloc();
    avFrame3 = av_frame_alloc();

    //compare with the previous width/height; the buffer must be reallocated when they change
    if (oldWidth != videoWidth || oldHeight != videoHeight) {
        //int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
        int byte = av_image_get_buffer_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight, 1);
        buffer = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
        oldWidth = videoWidth;
        oldHeight = videoHeight;
    }

    //define the pixel formats
    AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
    AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;
    //take the actual source format from the decoder
    srcFormat = videoCodec->pix_fmt;

    //SWS_FAST_BILINEAR is the fastest scaler but may lose some image data; change it to another flag if needed
    int flags = SWS_FAST_BILINEAR;

    //bind the buffer to avFrame3 so it can hold one converted frame
    //both calls below work; avpicture_fill has gradually been deprecated
    //avpicture_fill((AVPicture *)avFrame3, buffer, dstFormat, videoWidth, videoHeight);
    av_image_fill_arrays(avFrame3->data, avFrame3->linesize, buffer, dstFormat, videoWidth, videoHeight, 1);

    //create the image conversion context
    swsContext = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);

    //print the video information
    //av_dump_format(avFormatContext, 0, url.toStdString().data(), 0);
    //qDebug() << TIMEMS << "init ffmpeg finish";
    return true;
}
void FFmpegThread::run()
{
    while (!stopped) {
        //perform the initialization when the play flag is set
        if (isPlay) {
            if (this->init()) {
                isPlay = false;
                continue;
            } else {
                emit sig_videoInitError();
                break;
            }
        }

        if (av_read_frame(avFormatContext, avPacket) >= 0) {
            //determine whether the current packet belongs to the video or the audio stream
            int index = avPacket->stream_index;
            if (index == videoStreamIndex) {
                //decode the video stream; the avcodec_decode_video2 call is deprecated
#if 0
                avcodec_decode_video2(videoCodec, avFrame2, &frameFinish, avPacket);
#else
                frameFinish = avcodec_send_packet(videoCodec, avPacket);
                if (frameFinish < 0) {
                    //release the packet before skipping to the next iteration
                    av_packet_unref(avPacket);
                    continue;
                }
                frameFinish = avcodec_receive_frame(videoCodec, avFrame2);
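                //NOTE: the original listing breaks off at this point. The lines below are a hedged
                //sketch of the usual remainder of the loop: check the receive result, convert the
                //frame to RGB32 with the swsContext prepared in init(), hand the image to the UI,
                //and release the packet. The signal name sig_receiveImage is an assumption and must
                //match the actual class declaration.
                if (frameFinish >= 0) {
                    //convert the decoded YUV frame into the RGB32 buffer bound to avFrame3
                    sws_scale(swsContext, (const uint8_t *const *)avFrame2->data, avFrame2->linesize,
                              0, videoHeight, avFrame3->data, avFrame3->linesize);
                    //wrap the RGB32 buffer in a QImage and notify the UI (assumed signal name)
                    QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);
                    emit sig_receiveImage(image);
                }
#endif
            }
            //release the packet so it can be reused by the next av_read_frame call
            av_packet_unref(avPacket);
        }
        msleep(1);
    }

    //the thread is leaving the loop; reset the flags (resource cleanup is sketched below)
    stopped = false;
    isPlay = false;
    qDebug() << TIMEMS << "stop ffmpeg thread";
}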
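//NOTE: not part of the original listing. A hedged sketch of a matching cleanup routine for the
//resources allocated in init(); the method name closeAll() is an assumption and would also need
//to be declared in the class header. Calling it before re-running init() avoids leaking the
//previous packet, frames, codec context and scaler.
void FFmpegThread::closeAll()
{
    if (swsContext != NULL) {
        sws_freeContext(swsContext);
        swsContext = NULL;
    }

    if (avPacket != NULL) {
        av_packet_free(&avPacket);
    }

    if (avFrame != NULL) {
        av_frame_free(&avFrame);
    }

    if (avFrame2 != NULL) {
        av_frame_free(&avFrame2);
    }

    if (avFrame3 != NULL) {
        av_frame_free(&avFrame3);
    }

    if (videoCodec != NULL) {
        avcodec_free_context(&videoCodec);
    }

    if (avFormatContext != NULL) {
        avformat_close_input(&avFormatContext);
    }

    //the RGB32 buffer is kept on purpose: init() only reallocates it when the resolution changes
}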