#include <stdio.h>
#include <stdlib.h>
#include <stdbool.h>

#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/dict.h>

static AVFormatContext *i_fmt_ctx;
static AVStream *i_video_stream;
static AVFormatContext *o_fmt_ctx;
static AVStream *o_video_stream;
static bool bStop = false;

static unsigned __stdcall rtsp2mp4(void *pThis)
{
    AVDictionary *opts = NULL;

    avcodec_register_all();
    av_register_all();
    avformat_network_init();

    /* must be NULL so that avformat_open_input() allocates a new context */
    i_fmt_ctx = NULL;
    char rtspUrl[] = "rtsp://admin:admin123@172.16.28.254:554/cam/realmonitor?channel=1&subtype=0";
    const char *filename = "C:\\Users\\PC\\Desktop\\MP4\\test.mp4";
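    /*
     * Optional sketch (not part of the original flow): many cameras are more
     * reliable over TCP interleaving, which can be requested through the RTSP
     * demuxer's "rtsp_transport" option before opening the input:
     *
     *     AVDictionary *in_opts = NULL;
     *     av_dict_set(&in_opts, "rtsp_transport", "tcp", 0);
     *     avformat_open_input(&i_fmt_ctx, rtspUrl, NULL, &in_opts);
     *     av_dict_free(&in_opts);
     */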
    /* open the RTSP network stream */
    if (avformat_open_input(&i_fmt_ctx, rtspUrl, NULL, NULL) != 0)
    {
        fprintf(stderr, "could not open input file\n");
        return -1;
    }
    /* read stream information from the network input */
    if (avformat_find_stream_info(i_fmt_ctx, NULL) < 0)
    {
        fprintf(stderr, "could not find stream info\n");
        return -1;
    }
    //av_dump_format(i_fmt_ctx, 0, rtspUrl, 0);
    /* find the first video stream */
    for (unsigned i = 0; i < i_fmt_ctx->nb_streams; i++)
    {
        if (i_fmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
        {
            i_video_stream = i_fmt_ctx->streams[i];
            break;
        }
    }
    if (i_video_stream == NULL)
    {
        fprintf(stderr, "didn't find any video stream\n");
        return -1;
    }
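    /*
     * Alternative sketch: av_find_best_stream() can pick the video stream in one
     * call (assumption: pure remuxing, so no decoder needs to be returned):
     *
     *     int idx = av_find_best_stream(i_fmt_ctx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
     *     if (idx >= 0)
     *         i_video_stream = i_fmt_ctx->streams[idx];
     */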
    /* initialize the output stream from the input stream information */
    avformat_alloc_output_context2(&o_fmt_ctx, NULL, NULL, filename);

    /*
     * since the input is supposed to stay constant (framerate, dimensions, color format, ...)
     * we can safely copy the output codec values from the input stream
     */
    o_video_stream = avformat_new_stream(o_fmt_ctx, NULL);
    {
        AVCodecContext *c;
        c = o_video_stream->codec;
        c->bit_rate = 400000;
        c->codec_id = i_video_stream->codec->codec_id;
        c->codec_type = i_video_stream->codec->codec_type;
        c->time_base.num = i_video_stream->time_base.num;
        c->time_base.den = i_video_stream->time_base.den;
        fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
        c->width = i_video_stream->codec->width;
        c->height = i_video_stream->codec->height;
        c->pix_fmt = i_video_stream->codec->pix_fmt;
        printf("%d %d %d\n", c->width, c->height, c->pix_fmt);
        c->flags = i_video_stream->codec->flags;
        c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
        c->me_range = i_video_stream->codec->me_range;
        c->max_qdiff = i_video_stream->codec->max_qdiff;
        c->qmin = i_video_stream->codec->qmin;
        c->qmax = i_video_stream->codec->qmax;
        c->qcompress = i_video_stream->codec->qcompress;
    }
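    /*
     * Note: AVStream->codec is deprecated in newer FFmpeg releases. A sketch of
     * the equivalent stream setup with the codecpar API (assumption: FFmpeg 3.1+):
     *
     *     o_video_stream = avformat_new_stream(o_fmt_ctx, NULL);
     *     avcodec_parameters_copy(o_video_stream->codecpar, i_video_stream->codecpar);
     *     o_video_stream->codecpar->codec_tag = 0;
     */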
    /* create and open the MP4 output file */
    if (avio_open(&o_fmt_ctx->pb, filename, AVIO_FLAG_WRITE) < 0)
    {
        fprintf(stderr, "could not open output file '%s'\n", filename);
        return -1;
    }

    /* set the fragmented-MP4 muxer options and write the file header */
#if 1
    av_dict_set(&opts, "movflags", "frag_keyframe+empty_moov", 0);
    avformat_write_header(o_fmt_ctx, &opts);
    av_dict_free(&opts);
#else
    avformat_write_header(o_fmt_ctx, NULL);
#endif
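    /*
     * "frag_keyframe+empty_moov" makes the muxer write a fragmented MP4, so the
     * file stays playable even if recording is interrupted; with the #else
     * branch a regular MP4 is written and av_write_trailer() must complete
     * before the file becomes playable.
     */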
    int64_t last_pts = 0;
    int64_t last_dts = 0;

    /* read each packet from the input and write it into the MP4 file */
    int64_t pts = 0, dts = 0;
    while (!bStop)
    {
        AVPacket i_pkt;
        av_init_packet(&i_pkt);
        i_pkt.size = 0;
        i_pkt.data = NULL;
        if (av_read_frame(i_fmt_ctx, &i_pkt) < 0)
            break;

        /*
         * pts and dts should increase monotonically,
         * and pts should be >= dts
         */
        i_pkt.flags |= AV_PKT_FLAG_KEY;   /* mark every packet as a key frame */
        pts = i_pkt.pts;
        dts = i_pkt.dts;

        /* rescale timestamps from the input stream time base to the output stream time base */
        av_packet_rescale_ts(&i_pkt, i_video_stream->time_base, o_video_stream->time_base);
        i_pkt.stream_index = 0;

        static int num = 1;
        printf("frame %d\n", num++);
        av_interleaved_write_frame(o_fmt_ctx, &i_pkt);
        _sleep(10);
    }
    last_dts += dts;
    last_pts += pts;

    avformat_close_input(&i_fmt_ctx);

    /* write the MP4 file trailer */
    av_write_trailer(o_fmt_ctx);

    avcodec_close(o_fmt_ctx->streams[0]->codec);
    avio_closep(&o_fmt_ctx->pb);
    avformat_free_context(o_fmt_ctx);
    return 0;
}
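/*
 * Minimal usage sketch (assumption: none of this appears in the original
 * listing). The routine is started on a worker thread with _beginthreadex()
 * and stopped by setting bStop; a volatile/atomic flag would be safer, but the
 * shape of the call is the same. The two includes below would normally sit at
 * the top of the file.
 */
#include <process.h>
#include <windows.h>

int main(void)
{
    HANDLE hThread = (HANDLE)_beginthreadex(NULL, 0, rtsp2mp4, NULL, 0, NULL);
    if (hThread == NULL)
        return 1;

    Sleep(60 * 1000);   /* record for about one minute */
    bStop = true;       /* ask the remux loop to exit */

    WaitForSingleObject(hThread, INFINITE);
    CloseHandle(hThread);
    return 0;
}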