1. A tester brought over a video and noticed that it played differently in the ijk player than in the system player (MediaPlayer): with ijk, playback looked slightly choppy, not as smooth as with the system player.

The cause turned out to be simple: my framedrop value was set to 5, and changing it to 1 made the two players feel roughly the same. (Dropping 5 frames in a row is already visible to the human eye!)
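
For context, here is a minimal sketch of how that value is configured from the native layer. It assumes ijkplayer's ijkmp_set_option_int API and the IJKMP_OPT_CATEGORY_PLAYER category (on Android the Java equivalent is IjkMediaPlayer.setOption); verify both names against your ijkplayer version:

#include "ijkplayer/ijkplayer.h"

// Sketch: allow at most 1 consecutive dropped frame instead of 5.
static void setup_framedrop(IjkMediaPlayer *mp)
{
    ijkmp_set_option_int(mp, IJKMP_OPT_CATEGORY_PLAYER, "framedrop", 1);
}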

2. On a Snapdragon 660 device, a 4K (30 fps) video failed to play properly: the decoder only produced about 20 frames per second, and only about 4 of those were actually displayed, so the picture stuttered and audio and video drifted out of sync.

It turned out that many frames of this video decoded slowly on that device, so the video clock constantly lagged the audio clock. The hardware-decode drop logic therefore kept judging the video as late and kept dropping frames, which is what produced the visible stutter.

How frame dropping works

First, we need to be clear about where in the pipeline frames are dropped, and which frames can be dropped.

Frames can be dropped either before or after decoding.

Dropping before decoding must respect the frame type: B and P frames can be dropped selectively, but dropping an I frame means discarding the entire GOP, otherwise the decoder produces corrupted (artifacted) pictures.

Dropping after decoding does not depend on frame type: the frames are already decoded data (e.g. YUV), so you can simply compare each frame's pts against the master clock to decide whether audio and video are out of sync and drop accordingly.
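
The packet-level (pre-decode) rule can be sketched as follows. This is my own illustration with a made-up helper, not ffplay or ijkplayer code; the only real API it relies on is FFmpeg's AV_PKT_FLAG_KEY flag, which marks key-frame (I-frame) packets:

#include <libavcodec/avcodec.h>

/* Decide whether to drop a packet before it reaches the decoder.
 * lagging        : the caller's verdict that video is behind audio.
 * in_dropped_gop : set when a key packet is dropped; everything until the
 *                  next key packet must then be dropped too, otherwise the
 *                  decoder produces corrupted pictures. */
static int should_drop_packet(const AVPacket *pkt, int lagging, int *in_dropped_gop)
{
    if (pkt->flags & AV_PKT_FLAG_KEY) {
        // a key (I-frame) packet starts a new GOP: dropping it commits us
        // to dropping the whole GOP
        *in_dropped_gop = lagging;
        return lagging;
    }
    if (*in_dropped_gop)
        return 1;       // mid-GOP after a dropped I frame: keep dropping
    // a B/P packet can be dropped on its own, though dropping a P frame
    // still degrades later frames in the GOP that reference it
    return lagging;
}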

1. Frame dropping in ffplay

Let's look at ffplay's design first. ffplay drops decoded video frames.

In the video_thread decoding thread, get_video_frame decodes a packet into an AVFrame and then decides whether to drop it:

static int get_video_frame(VideoState *is, AVFrame *frame)
{
    int got_picture;

    // decode and obtain the decoded data (an AVFrame)
    if ((got_picture = decoder_decode_frame(&is->viddec, frame, NULL)) < 0)
        return -1;

    if (got_picture) {
        double dpts = NAN;

        if (frame->pts != AV_NOPTS_VALUE)
            dpts = av_q2d(is->video_st->time_base) * frame->pts; // convert the pts to seconds, i.e. the frame's position on the timeline

        // guess the frame's aspect ratio
        frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);

        // framedrop forced on (> 0), or enabled (!= 0) while the master clock is not the video clock
        if (framedrop > 0 || (framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
            if (frame->pts != AV_NOPTS_VALUE) {
                // frame_last_filter_delay defaults to 0, so:
                //   diff < 0: video is behind audio -> drop
                //   diff > 0: video is ahead of audio -> keep
                double diff = dpts - get_master_clock(is);
                // AV_NOSYNC_THRESHOLD: beyond this gap no correction is
                // attempted, so no frames are dropped for sync either
                if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                    diff - is->frame_last_filter_delay < 0 &&
                    is->viddec.pkt_serial == is->vidclk.serial &&
                    is->videoq.nb_packets) {
                    is->frame_drops_early++;
                    av_frame_unref(frame); // drop the frame
                    got_picture = 0;
                }
            }
        }
    }

    return got_picture;
}
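
A quick worked example of the dpts conversion above, with made-up values:

#include <libavutil/rational.h>
#include <stdint.h>
#include <stdio.h>

int main(void)
{
    AVRational tb  = {1, 90000};    // a common 90 kHz video time base
    int64_t    pts = 900000;        // raw pts, counted in time_base units
    double dpts = av_q2d(tb) * pts; // 900000 * (1/90000) = 10.0 seconds
    printf("this frame belongs at %.1f s\n", dpts);
    return 0;
}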

2. Frame dropping in ijk

ijk also drops decoded video frames; the logic is split into a hardware-decode path and a software-decode path.

2.1 Hardware-decode frame dropping

In ffpipenode_android_mediacodec_vdec.c, the func_run_sync function implements the whole hardware-decode flow:

/**
 * Hardware-decode (MediaCodec) processing loop.
 **/
static int func_run_sync(IJKFF_Pipenode *node)
{
    JNIEnv *env = NULL;
    IJKFF_Pipenode_Opaque *opaque = node->opaque;
    FFPlayer *ffp = opaque->ffp;
    VideoState *is = ffp->is;
    Decoder *d = &is->viddec;
    PacketQueue *q = d->queue;
    int ret = 0;
    int dequeue_count = 0;
    AVFrame *frame = NULL;
    int got_frame = 0;
    AVRational tb = is->video_st->time_base;
    AVRational frame_rate = av_guess_frame_rate(is->ic, is->video_st, NULL);
    double duration;
    double pts;

    if (!opaque->acodec) {
        return ffp_video_thread(ffp);
    }

    if (JNI_OK != SDL_JNI_SetupThreadEnv(&env)) {
        ALOGE("%s: SetupThreadEnv failed\n", __func__);
        return -1;
    }

    frame = av_frame_alloc();
    if (!frame)
        goto fail;

    // spawn the input thread (enqueue_thread_func) that feeds packets to MediaCodec
    opaque->enqueue_thread = SDL_CreateThreadEx(&opaque->_enqueue_thread, enqueue_thread_func, node, "amediacodec_input_thread");
    if (!opaque->enqueue_thread) {
        ALOGE("%s: SDL_CreateThreadEx failed\n", __func__);
        ret = -1;
        goto fail;
    }

    // loop, draining decoded output
    while (!q->abort_request) {
        int64_t timeUs = opaque->acodec_first_dequeue_output_request ? 0 : AMC_OUTPUT_TIMEOUT_US;
        got_frame = 0;
        // pull a decoded frame out of MediaCodec
        ret = drain_output_buffer(env, node, timeUs, &dequeue_count, frame, &got_frame);
        if (opaque->acodec_first_dequeue_output_request) {
            SDL_LockMutex(opaque->acodec_first_dequeue_output_mutex);
            opaque->acodec_first_dequeue_output_request = false;
            SDL_CondSignal(opaque->acodec_first_dequeue_output_cond);
            SDL_UnlockMutex(opaque->acodec_first_dequeue_output_mutex);
        }
        // draining failed
        if (ret != 0) {
            ret = -1;
            if (got_frame && frame->opaque) {
                // release the buffer with render=false so MediaCodec discards this frame
                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            goto fail;
        }
        if (got_frame) {
            duration = (frame_rate.num && frame_rate.den ? av_q2d((AVRational){frame_rate.den, frame_rate.num}) : 0);
            pts = (frame->pts == AV_NOPTS_VALUE) ? NAN : frame->pts * av_q2d(tb);
            // framedrop forced on (> 0), or enabled (!= 0) while the master clock is not the video clock
            if (ffp->framedrop > 0 || (ffp->framedrop && ffp_get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
                ffp->stat.decode_frame_count++; // count decoded frames
                if (frame->pts != AV_NOPTS_VALUE) {
                    double dpts = pts; // this frame's pts in seconds
                    double diff = dpts - ffp_get_master_clock(is); // gap between this video frame and the master clock (audio, when audio is master)
                    // frame_last_filter_delay is 0 here, so:
                    //   diff > 0: video is ahead of audio -> no need to drop
                    //   diff < 0: video is behind audio -> drop
                    // also require |diff| < AV_NOSYNC_THRESHOLD: beyond that
                    // the gap is too large and no sync correction is attempted
                    if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                        diff - is->frame_last_filter_delay < 0 &&
                        is->viddec.pkt_serial == is->vidclk.serial &&
                        is->videoq.nb_packets) { // decoder's packet serial matches the video clock's, and the video packet queue is not empty
                        is->frame_drops_early++;
                        is->continuous_frame_drops_early++; // starts at 0
                        if (is->continuous_frame_drops_early > ffp->framedrop) {
                            // more than framedrop consecutive drops: reset the counter and let this frame through
                            is->continuous_frame_drops_early = 0;
                        } else {
                            ffp->stat.drop_frame_count++; // one more dropped frame
                            // drop rate = dropped frames / decoded frames
                            ffp->stat.drop_frame_rate = (float)(ffp->stat.drop_frame_count) / (float)(ffp->stat.decode_frame_count);
                            if (frame->opaque) {
                                // tell MediaCodec to release the buffer without rendering it
                                SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
                            }
                            av_frame_unref(frame); // drop the frame
                            continue;
                        }
                    }
                }
            }
            // queue the frame into the decoded-picture queue; it is consumed in video_refresh
            ret = ffp_queue_picture(ffp, frame, pts, duration, av_frame_get_pkt_pos(frame), is->viddec.pkt_serial);
            if (ret) {
                // queueing failed: release the buffer with render=false so the frame is not displayed
                if (frame->opaque)
                    SDL_VoutAndroid_releaseBufferProxyP(opaque->weak_vout, (SDL_AMediaCodecBufferProxy **)&frame->opaque, false);
            }
            av_frame_unref(frame);
        }
    }

fail:
    av_frame_free(&frame);
    opaque->abort = true;
    SDL_WaitThread(opaque->enqueue_thread, NULL);
    SDL_AMediaCodecFake_abort(opaque->acodec);
    if (opaque->n_buf_out) {
        free(opaque->amc_buf_out);
        opaque->n_buf_out = 0;
        opaque->amc_buf_out = NULL;
        opaque->off_buf_out = 0;
        opaque->last_queued_pts = AV_NOPTS_VALUE;
    }
    if (opaque->acodec) {
        SDL_VoutAndroid_invalidateAllBuffers(opaque->weak_vout);
        SDL_LockMutex(opaque->acodec_mutex);
        SDL_UnlockMutex(opaque->acodec_mutex);
    }
    SDL_AMediaCodec_stop(opaque->acodec);
    SDL_AMediaCodec_decreaseReferenceP(&opaque->acodec);
    ALOGI("MediaCodec: %s: exit: %d", __func__, ret);
    return ret;
#if 0 // fall back to software decoding if hardware decoding fails
fallback_to_ffplay:
    ALOGW("fallback to ffplay decoder\n");
    return ffp_video_thread(opaque->ffp);
#endif
}
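
The piece that bit us in problem 2 is the consecutive-drop cap in the middle of that loop. Reduced to a standalone sketch (hypothetical helper, my own naming):

/* At most `framedrop` late frames are dropped in a row; then the counter
 * resets and one frame is rendered regardless. If the decoder never catches
 * up (the Snapdragon 660 case), framedrop=5 therefore discards 5 of every
 * 6 decoded frames: 20 decoded fps shrink to roughly 3-4 displayed fps. */
static int should_drop_late_frame(int framedrop, int *continuous_drops)
{
    if (++(*continuous_drops) > framedrop) {
        *continuous_drops = 0; // cap reached: render this frame
        return 0;
    }
    return 1;                  // still under the cap: drop it
}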

2.2 Software-decode frame dropping

The software path uses a get_video_frame function that mirrors ffplay's, extended with the same consecutive-drop cap and drop statistics as the hardware path:

static int get_video_frame(FFPlayer *ffp, AVFrame *frame)
{
    VideoState *is = ffp->is;
    int got_picture;

    // video stream buffer/cache statistics
    ffp_video_statistic_l(ffp);

    // software-decode timing test:
    // int64_t starttime = av_gettime_relative();

    // decode and obtain the decoded data (an AVFrame)
    if ((got_picture = decoder_decode_frame(ffp, &is->viddec, frame, NULL)) < 0)
        return -1;
    /*
    if (frame->key_frame) { // keyframe decode-timing test
        int64_t endtime = av_gettime_relative();
        int usetime = endtime - starttime;
        ALOGE("zmlruan>>>>>>usetime:%d", usetime);
    }*/
    if (got_picture) { // decode succeeded, we have a frame
        double dpts = NAN;

        if (frame->pts != AV_NOPTS_VALUE)
            dpts = av_q2d(is->video_st->time_base) * frame->pts; // convert the pts to seconds, i.e. the frame's position on the timeline

        // guess the frame's aspect ratio
        frame->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, frame);

        // framedrop forced on (> 0), or enabled (!= 0) while the master clock is not the video clock
        if (ffp->framedrop > 0 || (ffp->framedrop && get_master_sync_type(is) != AV_SYNC_VIDEO_MASTER)) {
            ffp->stat.decode_frame_count++; // count decoded frames
            if (frame->pts != AV_NOPTS_VALUE) {
                // diff = video pts minus the master clock (audio, when audio is master)
                double diff = dpts - get_master_clock(is);
                // AV_NOSYNC_THRESHOLD: beyond this gap no correction is attempted
                if (!isnan(diff) && fabs(diff) < AV_NOSYNC_THRESHOLD &&
                    diff - is->frame_last_filter_delay < 0 &&
                    is->viddec.pkt_serial == is->vidclk.serial &&
                    is->videoq.nb_packets) {
                    is->frame_drops_early++;
                    is->continuous_frame_drops_early++;
                    if (is->continuous_frame_drops_early > ffp->framedrop) {
                        // cap reached: reset the counter and let this frame through
                        is->continuous_frame_drops_early = 0;
                    } else {
                        ffp->stat.drop_frame_count++; // one more dropped frame
                        // drop rate = dropped frames / decoded frames
                        ffp->stat.drop_frame_rate = (float)(ffp->stat.drop_frame_count) / (float)(ffp->stat.decode_frame_count);
                        av_frame_unref(frame); // drop the frame
                        got_picture = 0; // report that no frame was obtained: it was dropped
                    }
                }
            }
        }
    }

    return got_picture;
}
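
The drop_frame_rate maintained above can be read back by the application to monitor playback health. A hedged sketch, assuming ijkplayer's property API (ijkmp_get_property_float with FFP_PROP_FLOAT_DROP_FRAME_RATE; check the exact names in your ijkplayer version):

#include "ijkplayer/ijkplayer.h"

// Sketch: warn once the drop rate passes 20% (threshold is my own choice).
static void check_drop_rate(IjkMediaPlayer *mp)
{
    float rate = ijkmp_get_property_float(mp, FFP_PROP_FLOAT_DROP_FRAME_RATE, 0.0f);
    if (rate > 0.2f)
        ALOGW("drop rate %.1f%%: decoder cannot keep up\n", rate * 100.0f);
}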

That covers ijk's frame-dropping implementation. It also explains both problems at the top: a large framedrop value lets ijk drop several frames in a row, which the eye notices (problem 1), and a decoder that cannot keep up leaves the video clock permanently behind the audio clock, so the drop condition stays true and frames are discarded continuously (problem 2).