#include "ffmpeg.h"
#include "ui_ffmpeg.h"
//#define FFMPEG_MJPEG
//#define FFMPEG_H264
#define FFMPEG_YUV
#define TIMEMS qPrintable(QTime::currentTime().toString("HH:mm:ss zzz"))
// Worker thread that captures frames from a V4L2 camera through FFmpeg
// and emits them as QImage. The whole pipeline (demuxer, decoder, scaler)
// is prepared once, at construction time.
ffmpeg::ffmpeg(QWidget *parent) :
    QThread(parent)
{
    // Set everything up before the caller invokes start().
    initVideo();
}
ffmpeg::~ffmpeg()
{
    // Release everything initVideo() allocated; previously nothing was
    // freed, leaking the scaler, frames, packet, RGB buffer and the
    // demuxer context. All FFmpeg free helpers below are NULL-safe.
    // NOTE(review): assumes these members start zero-initialized in the
    // header — confirm; also stop the thread (requestInterruption() +
    // wait()) before destroying this object, since run() loops forever.
    if (swsContext) {
        sws_freeContext(swsContext);
        swsContext = NULL;
    }
    av_frame_free(&avFrame);
    av_frame_free(&avFrame2);
    av_frame_free(&avFrame3);
    av_packet_free(&avPacket);
    if (buffer) {
        av_free(buffer);
        buffer = NULL;
    }
    // Closes the input device and frees the stream codec contexts it owns.
    avformat_close_input(&ifmt_ctx);
}
int ffmpeg::initVideo()
{
//注册库中所有可用的文件格式和解码器
av_register_all();
//注册所有设备,主要用于本地摄像机播放支持
avdevice_register_all();
//初始化网络流格式,使用网络流时必须先执行
avformat_network_init();
qDebug() << TIMEMS << "init ffmpeg lib ok" << " version:" << FFMPEG_VERSION;
const char *inputFilename = "/dev/video4";
AVDictionary *options = NULL;
AVCodec *videoDecoder = NULL;
av_dict_set(&options, "framerate", "25", 0);
av_dict_set(&options, "video_size", "1920x1080", 0);
#ifdef FFMPEG_MJPEG
av_dict_set(&options, "input_format", "mjpeg", 0);
#endif
#ifdef FFMPEG_YUV
av_dict_set(&options, "input_format", "yuyv422", 0);
#endif
ifmt_ctx = avformat_alloc_context();
AVInputFormat *ifmt = av_find_input_format("video4linux2");
if (nullptr != ifmt) {
qDebug("input device name video4linux2!");
} else {
qDebug("Null point ");
}
//打开输入视频流
int result = avformat_open_input(&ifmt_ctx, inputFilename, ifmt, &options);
if (result < 0) {
qDebug() << TIMEMS << "open input error" << inputFilename;
return false;
}
//获取流信息
result = avformat_find_stream_info(ifmt_ctx, NULL);
if (result < 0) {
qDebug() << TIMEMS << "find stream info error";
return false;
}
videoStreamIndex = -1;
videoStreamIndex = av_find_best_stream(ifmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, &videoDecoder, 0);
if (videoStreamIndex < 0) {
qDebug() << TIMEMS << "find video stream index error";
return false;
}
//获取输入视频流
in_stream = ifmt_ctx->streams[videoStreamIndex];
if (!in_stream)
{
printf("Failed get input stream\n");
return false;
}
//获取视频流解码器
videoCodec = in_stream->codec;
//获取分辨率大小
videoWidth = in_stream->codec->width;
videoHeight = in_stream->codec->height;
//如果没有获取到宽高则返回
if (videoWidth == 0 || videoHeight == 0) {
qDebug() << TIMEMS << "find width height error";
return false;
}
//获取视频流的帧率 fps,要对0进行过滤,除数不能为0,有些时候获取到的是0
int num = in_stream->codec->framerate.num;
int den = in_stream->codec->framerate.den;
if (num != 0 && den != 0) {
videoFps = num / den ;
}
QString videoInfo = QString("视频流信息 -> 索引: %1 格式: %2 时长: %3 秒 fps: %4 分辨率: %5*%6")
.arg(videoStreamIndex).arg(ifmt_ctx->iformat->name)
.arg((ifmt_ctx->duration) / 1000000).arg(videoFps).arg(videoWidth).arg(videoHeight);
qDebug() << TIMEMS << videoInfo;
//打开视频解码器
result = avcodec_open2(videoCodec, videoDecoder, NULL);
if (result < 0) {
qDebug() << TIMEMS << "open video codec error";
return false;
}
avPacket = av_packet_alloc();
avFrame = av_frame_alloc();
avFrame2 = av_frame_alloc();
avFrame3 = av_frame_alloc();
//比较上一次文件的宽度高度,当改变时,需要重新分配内存
if (oldWidth != videoWidth || oldHeight != videoHeight) {
int byte = avpicture_get_size(AV_PIX_FMT_RGB32, videoWidth, videoHeight);
buffer = (uint8_t *)av_malloc(byte * sizeof(uint8_t));
oldWidth = videoWidth;
oldHeight = videoHeight;
}
//定义像素格式
AVPixelFormat srcFormat = AV_PIX_FMT_YUV420P;
AVPixelFormat dstFormat = AV_PIX_FMT_RGB32;
#ifdef FFMPEG_MJPEG
srcFormat = AV_PIX_FMT_YUV420P;
#endif
#ifdef FFMPEG_YUV
srcFormat = AV_PIX_FMT_YUYV422;
#endif
#ifdef FFMPEG_H264
srcFormat = AV_PIX_FMT_YUV420P;
#endif
av_image_fill_arrays(avFrame3->data, avFrame3->linesize, buffer, dstFormat, videoWidth, videoHeight, 1);
//默认最快速度的解码采用的SWS_FAST_BILINEAR参数,可能会丢失部分图片数据,可以自行更改成其他参数
int flags = SWS_FAST_BILINEAR;
swsContext = sws_getContext(videoWidth, videoHeight, srcFormat, videoWidth, videoHeight, dstFormat, flags, NULL, NULL, NULL);
qDebug() << TIMEMS << "init ffmpegVideo ok";
}
// Blocking demux/decode loop: reads packets from the device, decodes video
// frames, converts them to RGB32 and emits them via receiveImage().
// Runs until the thread is terminated externally; the return value is unused.
int ffmpeg::playVideo()
{
    while (true) {
        if (av_read_frame(ifmt_ctx, avPacket) >= 0) {
            // Decode only packets belonging to the video stream.
            if (avPacket->stream_index == videoStreamIndex) {
                avcodec_decode_video2(videoCodec, avFrame2, &frameFinish, avPacket);
                if (frameFinish) {
                    // Convert the decoded frame into the RGB32 buffer that
                    // backs avFrame3.
                    sws_scale(swsContext, (const uint8_t *const *)avFrame2->data, avFrame2->linesize, 0, videoHeight, avFrame3->data, avFrame3->linesize);
                    // avFrame3->data[0] and `buffer` are the same memory, so
                    // either pointer could be used here.
                    QImage image((uchar *)buffer, videoWidth, videoHeight, QImage::Format_RGB32);
                    if (!image.isNull()) {
                        emit receiveImage(image);
                        msleep(20);
                    }
                }
            }
            // Unref EVERY packet, not only video ones, so packets from other
            // streams do not leak. The old av_freep(avPacket) call has been
            // removed: it treated the AVPacket struct itself as a pointer
            // slot and freed its first field, corrupting the packet that
            // av_packet_alloc() owns (the packet is freed once, in the dtor).
            av_packet_unref(avPacket);
        } else {
            // Read failure / EOF: back off briefly instead of busy-spinning.
            msleep(10);
        }
    }
}
// QThread entry point. Executes the blocking capture/decode loop on the
// worker thread; it only returns when the thread is terminated.
void ffmpeg::run()
{
    playVideo();
}
// NOTE(review): stray web-page scrape residue ("评论0" = "0 comments",
// "最新资源" = "latest resources") commented out so the file compiles.