Section 5: Porting ffplay to Android

Author: xdongp@gmail.com



With the preparation of the previous sections complete, we now port ffplay itself. This is the hardest and most complex step, and it splits into two parts: porting ffplay to sdl-1.3, and writing and debugging ffplay's Android interface.

 

1. Porting ffplay to SDL-1.3

On Android the SDL version must be sdl-1.3 or later. The port mainly modifies the video display interface, replacing SDL_DisplayYUVOverlay(vp->bmp, &rect) with the SDL_CreateTexture() approach; in essence, the display mode changes from YUV to RGB. The main changes are as follows:

Variable definition changes

The old version rendered into an SDL_Surface. The new version uses an SDL_Window as the display area (multiple windows are supported), an SDL_Renderer as the rendering backend, and an SDL_Texture as the pixel data container.


#ifdef SDL12
    SDL_Overlay *bmp;
#else
    AVFrame *pFrameRGB;   /* RGB frame that replaces the YUV overlay */
    int numBytes;         /* size of the RGB pixel buffer */
    uint8_t *buffer;      /* pixel storage backing pFrameRGB */
#endif

#ifdef SDL12
static SDL_Surface *screen = NULL;
#else
static SDL_Window *window = NULL;
static SDL_Renderer *renderer = NULL;
static SDL_Texture *texture = NULL;
#endif


Those are the variable definitions; we use a texture in place of the overlay.

 

Display creation changes

Use SDL_CreateRenderer to create the renderer used for display.

Changes in static int video_open(VideoState *is, int force_set_video_mode):


#ifdef SDL12
    if (screen && is->width == screen->w && screen->w == w
            && is->height == screen->h && screen->h == h && !force_set_video_mode)
        return 0;
    screen = SDL_SetVideoMode(w, h, 0, flags);
    if (!screen) {
        fprintf(stderr, "SDL: could not set video mode - exiting\n");
        do_exit(is);
    }
    if (!window_title)
        window_title = input_filename;
    SDL_WM_SetCaption(window_title, window_title);

    is->width = screen->w;
    is->height = screen->h;
#else
    //SDL_SetVideoMode(w, h, 32, 0);
    window = SDL_CreateWindow("MySDL", SDL_WINDOWPOS_CENTERED,
            SDL_WINDOWPOS_CENTERED, w, h, SDL_WINDOW_SHOWN | SDL_WINDOW_OPENGL);
    if (window == NULL) {
        fprintf(stderr, "SDL: could not set video window - exiting\n");
        exit(1);
    }

    //renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_ACCELERATED);
    renderer = SDL_CreateRenderer(window, -1, SDL_RENDERER_TARGETTEXTURE);

    if (!renderer) {
        fprintf(stderr, "Couldn't create renderer: %s\n", SDL_GetError());
        exit(1);
    }

    is->width = w;
    is->height = h;
#endif


 

 

Picture buffer allocation changes

Changes to the picture setup in static void alloc_picture(AllocEventProps *event_props):
 
#ifdef SDL12
    if (vp->bmp)
        SDL_FreeYUVOverlay(vp->bmp);
#else
    if (vp->pFrameRGB) {
        /* we already have one; make another, bigger/smaller */
        av_free(vp->pFrameRGB);
        vp->pFrameRGB = 0;
        if (vp->buffer) {
            av_free(vp->buffer);
            vp->buffer = 0;
        }
    }
#endif
    vp->width = frame->width;
    vp->height = frame->height;
    vp->pix_fmt = frame->format;

    //video_open(event_props->is, 0);

#ifdef SDL12
    vp->bmp = SDL_CreateYUVOverlay(vp->width, vp->height,
            SDL_YV12_OVERLAY,
            screen);
    if (!vp->bmp || vp->bmp->pitches[0] < vp->width) {
        /* SDL allocates a buffer smaller than requested if the video
         * overlay hardware is unable to support the requested size. */
        fprintf(stderr, "Error: the video system does not support an image\n"
                "size of %dx%d pixels. Try using -lowres or -vf \"scale=w:h\"\n"
                "to reduce the image size.\n", vp->width, vp->height);
        do_exit(is);
    }
#else
    vp->pFrameRGB = avcodec_alloc_frame();

    vp->width = is->video_st->codec->width;
    vp->height = is->video_st->codec->height;

    vp->numBytes = avpicture_get_size(dst_fix_fmt, vp->width, vp->height);
    vp->buffer = (uint8_t *) av_malloc(vp->numBytes * sizeof(uint8_t));

    if (!vp->pFrameRGB || !vp->buffer) {
        printf("cannot get frame memory, exit\n");
    }

    avpicture_fill((AVPicture *) vp->pFrameRGB, vp->buffer, dst_fix_fmt,
            vp->width, vp->height);
#endif
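
The snippet above references dst_fix_fmt (and the display code later uses display_fix_fmt); neither is part of stock ffplay. A minimal sketch of plausible definitions, assuming an RGB565 pipeline, which is a common Android display format; the actual values in kuplayer_android.c may differ:


/* Assumed format pair: the ffmpeg pixel format fed to sws_scale and the
 * matching SDL texture format; RGB565 is only one reasonable choice. */
static enum PixelFormat dst_fix_fmt = PIX_FMT_RGB565LE;       /* ffmpeg side */
static Uint32 display_fix_fmt       = SDL_PIXELFORMAT_RGB565; /* SDL side */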

 


 

 

Color conversion changes

In static int queue_picture(VideoState *is, …), earlier versions displayed YUV data directly. Android here can only display RGB data, so we switch to RGB output, which relies on sws_scale(…). That function is very time-consuming, and optimizing it helps both the displayed picture and overall program efficiency. As written, sws_scale(…) displays correctly but is not efficient; we return to this in the optimization section later.


 
    /* if the frame is not skipped, then display it */
#ifdef SDL12
    if (vp->bmp) {
#else
    if (vp->pFrameRGB) {
#endif

#ifdef SDL12
        AVPicture pict = { { 0 } };

        /* get a pointer on the bitmap */
        SDL_LockYUVOverlay(vp->bmp);

        pict.data[0] = vp->bmp->pixels[0];
        pict.data[1] = vp->bmp->pixels[2];
        pict.data[2] = vp->bmp->pixels[1];

        pict.linesize[0] = vp->bmp->pitches[0];
        pict.linesize[1] = vp->bmp->pitches[2];
        pict.linesize[2] = vp->bmp->pitches[1];

        is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
                vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
                PIX_FMT_YUV420P, sws_flags, NULL, NULL, NULL);

        if (is->img_convert_ctx == NULL) {
            LOGV("Cannot initialize the conversion context\n");
            exit(1);
        }
        sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize,
                0, vp->height, pict.data, pict.linesize);
        vp->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic, is->video_st, src_frame);

        /* update the bitmap content */
        SDL_UnlockYUVOverlay(vp->bmp);
#else
        if (is->img_convert_ctx == NULL) {
            is->img_convert_ctx = sws_getCachedContext(is->img_convert_ctx,
                    vp->width, vp->height, vp->pix_fmt, vp->width, vp->height,
                    dst_fix_fmt, sws_flags, NULL, NULL, NULL);
            if (is->img_convert_ctx == NULL) {
                LOGV("Cannot initialize the conversion context!\n");
                exit(1);
            }
        }
        sws_scale(is->img_convert_ctx, src_frame->data, src_frame->linesize, 0,
                is->video_st->codec->height, vp->pFrameRGB->data,
                vp->pFrameRGB->linesize);
        vp->sample_aspect_ratio = av_guess_sample_aspect_ratio(is->ic,
                is->video_st, src_frame);
#endif
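
One knob worth noting here: ffplay's global sws_flags defaults to SWS_BICUBIC. Since sws_scale(…) is the hot spot, a cheaper scaler is one plausible first lever for the later optimization pass; a sketch only, not necessarily what kuplayer_android.c ends up doing:


/* trade scaling quality for speed on low-end Android devices */
static int sws_flags = SWS_FAST_BILINEAR;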


 

 

Display changes

In static void video_image_display(VideoState *is), SDL_UpdateTexture(…) fills the texture with the frame data, and SDL_RenderPresent(…) flushes the rect to the screen:


#ifdef SDL12
    if (vp->bmp) {
#else
    if (vp->pFrameRGB) {
#endif
        if (vp->sample_aspect_ratio.num == 0)
            aspect_ratio = 0;
        else
            aspect_ratio = av_q2d(vp->sample_aspect_ratio);

        if (aspect_ratio <= 0.0)
            aspect_ratio = 1.0;
        aspect_ratio *= (float) vp->width / (float) vp->height;

        /* XXX: we suppose the screen has a 1.0 pixel ratio */
        height = is->height;
        width = ((int) rint(height * aspect_ratio)) & ~1;
        if (width > is->width) {
            width = is->width;
            height = ((int) rint(width / aspect_ratio)) & ~1;
        }
        x = (is->width - width) / 2;
        y = (is->height - height) / 2;
        is->no_background = 0;
        rect.x = is->xleft + x;
        rect.y = is->ytop + y;
        rect.w = FFMAX(width, 1);
        rect.h = FFMAX(height, 1);
#ifdef SDL12
        SDL_DisplayYUVOverlay(vp->bmp, &rect);
#else
        if (NULL == texture) {
            texture = SDL_CreateTexture(renderer, display_fix_fmt,
                    SDL_TEXTUREACCESS_STATIC, is->video_st->codec->width,
                    is->video_st->codec->height);
            if (!texture) {
                LOGV("Couldn't create texture: %s\n", SDL_GetError());
                exit(1);
            }
            SDL_SetTextureBlendMode(texture, SDL_BLENDMODE_BLEND);
            //best scale mode
            //SDL_SetTextureScaleMode(texture, SDL_TEXTURESCALEMODE_BEST);
        }
        SDL_RenderClear(renderer);
        SDL_UpdateTexture(texture, NULL, vp->pFrameRGB->data[0],
                vp->pFrameRGB->linesize[0]);
        SDL_RenderCopy(renderer, texture, NULL, &rect);

        g_current_duration = (int) (get_master_clock(is) * 1000);
        SDL_RenderPresent(renderer);

#endif


 

 

Stream close changes

static void stream_close(VideoState *is) is modified as follows:


#ifdef SDL12
        if (vp->bmp) {
            SDL_FreeYUVOverlay(vp->bmp);
            vp->bmp = NULL;
        }
#else
        if (vp->pFrameRGB) {
            av_free(vp->pFrameRGB);
            vp->pFrameRGB = 0;
        }
        if (vp->buffer) {
            av_free(vp->buffer);
            vp->buffer = 0;
        }
#endif


 

 

The above covers essentially all of the SDL-1.3-based modifications to ffplay. The core change is moving from the YUV (overlay) path to the RGB (texture) path; internally, SDL renders through OpenGL ES. Since SDL encapsulates that layer, we do not dig into it here. If you do not want to use SDL for display, you can call the OpenGL ES API directly, but the porting cost is higher.

 

 

2. Writing the Android interface

Android programs call C code through JNI; for JNI coding conventions, see http://developer.android.com/training/articles/perf-jni.html or other references.

For the player interface design, we can refer to Android's MediaPlayer (http://developer.android.com/reference/android/media/MediaPlayer.html). The essential operations are Init (initialization), InitEd (initialization finished, callbacks can be set), Play, Stop, Seek, Exit, getDuration (current playback position), and getTotalDuration (total playing time). Create a file named native.cpp; the interface design is as follows:


#include <unistd.h>

#include <jni.h>
#include <android/log.h>

extern "C" {
#include "play.h"
}

#ifdef ANDROID

/* Include the SDL main definition header */

/*******************************************************************************
                 Functions called by JNI
*******************************************************************************/


// Library init
//extern "C" jint JNI_OnLoad(JavaVM* vm, void* reserved)
//{
//    return JNI_VERSION_1_4;
//}

// Start up the SDL app
extern "C" int Java_org_libsdl_app_SDLActivity_PlayerInit(JNIEnv* env, jobject obj)
{
    return player_init();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerPrepare(JNIEnv* env, jobject obj, jstring jfileName)
{
    jboolean isCopy;
    char localFileName[1024];
    const char *fileString = env->GetStringUTFChars(jfileName, &isCopy);

    strncpy(localFileName, fileString, 1024);
    localFileName[1023] = '\0';   /* strncpy does not guarantee termination */
    env->ReleaseStringUTFChars(jfileName, fileString);
    return player_prepare(localFileName);
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerMain(JNIEnv* env, jobject obj)
{
    return player_main();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerExit(JNIEnv* env, jobject obj)
{
    return player_exit();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerSeekTo(JNIEnv* env, jobject obj, jint msec)
{
    int pos = msec;
    return seekTo(pos);
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerPause(JNIEnv* env, jobject obj)
{
    return streamPause();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerIsPlay(JNIEnv* env, jobject obj)
{
    return isPlay();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayerGetDuration(JNIEnv* env, jobject obj)
{
    return getDuration();
}

extern "C" int Java_org_libsdl_app_SDLActivity_PlayergetCurrentPosition(JNIEnv* env, jobject obj)
{
    return getCurrentPosition();
}

#endif /* ANDROID */
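
native.cpp pulls the player entry points from play.h. A minimal sketch of what that header would declare, reconstructed from the calls above; the actual signatures in kuplayer_android.c may differ:


#ifndef PLAY_H
#define PLAY_H

int player_init(void);
int player_prepare(char *filename);
int player_main(void);
int player_exit(void);
int seekTo(int msec);
int streamPause(void);
int isPlay(void);
int getDuration(void);
int getCurrentPosition(void);

#endif /* PLAY_H */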


 

 

3. JNI-to-Java communication

Calling JNI interfaces from Java is straightforward, but when the C code on the JNI side wants to notify the Java side that some event has occurred, JNI-to-Java communication is needed. There are examples of calling Java from C; the approach differs slightly depending on whether the calling thread is the JVM thread or a native child thread.

The communication interface we design here works like a signal: the C code calls a method written ahead of time in Java, which then fires the event. The C function is as follows.

Open the project from Section 4 and find jni/SDL/src/core/android/SDL_android.cpp.

Add:


extern"C" void Android_Notify(int id){
    int status;
    JNIEnv *env;
    static bool isAttached = false;    
    status = mJavaVM->GetEnv((void **)&env, JNI_VERSION_1_4);
    if(status < 0) {
        LOGE("callback_handler: failed toget JNI environment, assuming native thread");
        status =mJavaVM->AttachCurrentThread(&env, NULL);
        if(status < 0) {
            LOGE("callback_handler: failedto attach current thread");
            return;
        }
        isAttached = true;
    }
 
    env->CallStaticVoidMethod(mActivityClass,mNotify, id);
 
    if (isAttached) {
        mJavaVM->DetachCurrentThread();
    }
 
}
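
This function relies on mJavaVM and mActivityClass, which SDL_android.cpp already caches for its own callbacks, plus a method ID mNotify that we must cache ourselves. A minimal sketch, assuming the Java side defines a static method postNotify(int) on SDLActivity (the init hook name and the Java method name are both assumptions):


/* In SDL_android.cpp, next to SDL's other cached method IDs. */
static jmethodID mNotify;

/* Called from SDL's native init path, where cls is the SDLActivity class. */
extern "C" void Android_Notify_Init(JNIEnv *env, jclass cls)
{
    /* Java side: public static void postNotify(int id) */
    mNotify = env->GetStaticMethodID(cls, "postNotify", "(I)V");
}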


Add the matching header at jni/SDL/include/SDL_notify.h, as follows:


#ifndef NOTIFY_H
#define NOTIFY_H

void Android_Notify(int id);

#endif


That completes the notification path.
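
As a hypothetical usage, the player code can now raise events toward Java; the event id convention below is our own invention, not something defined by SDL or ffplay:


#include "SDL_notify.h"

#define NOTIFY_PLAYBACK_COMPLETE 1   /* assumed id convention */

/* e.g. called from the player when the stream reaches end of file */
static void notify_playback_complete(void)
{
    Android_Notify(NOTIFY_PLAYBACK_COMPLETE);
}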

 

4. Building the project

Modify jni/src/Android.mk as follows:


LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)

LOCAL_MODULE := main

SDL_PATH := ../SDL
FFMPEG_PATH := ../ffmpeg

LOCAL_C_INCLUDES := $(LOCAL_PATH)/$(SDL_PATH)/include \
                    $(LOCAL_PATH)/$(FFMPEG_PATH)/include \
                    $(LOCAL_PATH)/include

# Add your application source files here...
LOCAL_SRC_FILES := $(SDL_PATH)/src/main/android/SDL_android_main.cpp kuplayer_android.c native.cpp

LOCAL_CFLAGS += -DANDROID
LOCAL_SHARED_LIBRARIES := SDL

LOCAL_LDLIBS := -lGLESv1_CM -llog
LOCAL_LDLIBS += $(LOCAL_PATH)/libffmpeg.so
include $(BUILD_SHARED_LIBRARY)


include $(CLEAR_VARS)
LOCAL_MODULE := ffmpeg
NDK_MODULE_PATH := $(LOCAL_PATH)
LOCAL_SRC_FILES := libffmpeg.so
include $(PREBUILT_SHARED_LIBRARY)


Copy over the headers from the ffmpeg build in Section 3:


cd /home/xdpan/work/android-project/jni/
cp -R ../../ffmpeg-0.11.1/android/armv7-a/include ./
cp ../../ffmpeg-0.11.1/android/armv7-a/lib/libffmpeg.so ./src/


Run ndk-build -j2 in the jni directory to complete the build.

 

 

The source files are as follows (or extract them from the corresponding directory in the virtual machine):

kuplayer_android.c

native.cpp