一、说明

本文代码与 QT 使用ffmpeg 学习5 ffmpeg API推流Demo 代码类似。
主要是加了pts dts的处理。

在使用 QT 使用ffmpeg 学习5 ffmpeg API推流Demo 代码时,当接收流是rtsp流的时候,会出现 av_interleaved_write_frame 返回 -22 的错误。

下面代码使用方式2加上了一个fix代码。

网上很多讲要求:
AVPacket 中 pts 必须大于或等于dts, 否则就返回-22 错误。

我查看demo的情况,代码出现-22错误并非因为dts大于pts了,而是pts此次的值比上次的pts小触发了-22错误。估计与帧类型有关,以后再仔细研究。

这里处理的方式是记录上次的pts值并累加到下一次的pts上。

// 复制包
if (pts == 0)
{
pts = pkt.pts;
dts = pkt.dts;
}
else
{
pkt.pts += pts;
pkt.dts += dts;
}

二、代码

#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
//Windows
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
}
#else
//Linux...
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#ifdef __cplusplus
};
#endif
#endif

int main(int argc, char* argv[])
{
AVOutputFormat *ofmt = NULL;
AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
AVPacket pkt;
const char *in_filename, *out_filename;
int ret, i;
int videoindex=-1;
int frame_index=0;
int64_t start_time=0;
int64_t pts=0;
int64_t dts=0;

in_filename = "rtsp地址";

out_filename= "推送rtmp地址";
//out_filename = "rtp://233.233.233.233:6666";//输出 URL(Output URL)[UDP]

// 初始化ffmpeg
av_register_all();
// 初始化网络库
avformat_network_init();
// 初始化输入
if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, 0)) < 0) {
printf( "Could not open input file.");
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
printf( "Failed to retrieve input stream information");
goto end;
}
for(i=0; i<ifmt_ctx->nb_streams; i++)
if(ifmt_ctx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
videoindex=i;
break;
}

// 打印流媒体信息
av_dump_format(ifmt_ctx, 0, in_filename, 0);
// 输出流
avformat_alloc_output_context2(&ofmt_ctx, NULL, "flv", out_filename); //RTMP
//avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);//UDP
if (!ofmt_ctx) {
printf( "Could not create output context\n");
ret = AVERROR_UNKNOWN;
goto end;
}
ofmt = ofmt_ctx->oformat;
printf("ifmt_ctx->nb_streams=%d \n" , ifmt_ctx->nb_streams);

for (i = 0; i < ifmt_ctx->nb_streams; i++) {
//Create output AVStream according to input AVStream
AVStream *in_stream = ifmt_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
if (!out_stream) {
printf( "Failed allocating output stream\n");
ret = AVERROR_UNKNOWN;
goto end;
}
//Copy the settings of AVCodecContext
ret = avcodec_copy_context(out_stream->codec, in_stream->codec);
if (ret < 0) {
printf( "Failed to copy context from input to output stream codec context\n");
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
// 打印输出流信息
av_dump_format(ofmt_ctx, 0, out_filename, 1);

// 使用avio_open 打开输出
if (!(ofmt->flags & AVFMT_NOFILE)) {
ret = avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE);
if (ret < 0) {
printf( "Could not open output URL '%s'", out_filename);
goto end;
}
}
// 输出写文件头
ret = avformat_write_header(ofmt_ctx, NULL);
if (ret < 0) {
printf( "Error occurred when opening output URL\n");
goto end;
}
start_time=av_gettime();

while (1) {
printf("before pts=%i dts=%i\n" , pkt.pts,pkt.dts);
printf("pkt.stream_index=%i\n" , pkt.stream_index);
AVStream *in_stream, *out_stream;
//获取一帧
ret = av_read_frame(ifmt_ctx, &pkt);
if (ret < 0)
break;
// 如果没有PTS , 例如H.264裸流
//更简单的处理就用 PTSpkt.pts==AV_NOPTS_VALUE
if(pkt.pts==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=ifmt_ctx->streams[videoindex]->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
//计算 pts的公式
pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts=pkt.pts;
pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
}
//Important:Delay
if(pkt.stream_index==videoindex){
AVRational time_base=ifmt_ctx->streams[videoindex]->time_base;
AVRational time_base_q={1,AV_TIME_BASE};
int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
int64_t now_time = av_gettime() - start_time;
if (pts_time > now_time)
av_usleep(pts_time - now_time);
}
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[pkt.stream_index];

// 复制包
if (pts == 0)
{
pts = pkt.pts;
dts = pkt.dts;
}
else
{
pkt.pts += pts;
pkt.dts += dts;
}

pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);

pkt.pos = -1;
// 打印log
if(pkt.stream_index==videoindex){
printf("Send %8d video frames to output URL\n",frame_index);
frame_index++;
}

//ret = av_write_frame(ofmt_ctx, &pkt);
printf("after pts=%i dts=%i\n" , pkt.pts,pkt.dts);
ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
if (ret < 0) {
printf( "Error muxing packet %d \n",ret);
break;
}
av_free_packet(&pkt);
}
//Write file trailer
av_write_trailer(ofmt_ctx);
end:
avformat_close_input(&ifmt_ctx);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
if (ret < 0 && ret != AVERROR_EOF) {
printf( "Error occurred.\n");
return -1;
}
return 0;
}

使用vlc可以打开rtmp的推送目标地址预览图像。