1. An earlier post already gave a brief introduction to porting FFmpeg to Android.

2. I want to build something on Android with FFmpeg, but I'm starting from zero, so I've been collecting material online and learning from scratch. That led me to the dranger tutorial series: I started with tutorial01, first got it working on the PC, and am now porting it to the phone.

3. Porting it to the phone essentially comes down to calling the FFmpeg functions from Java through JNI.

4. With that, on to the main content:

(1) See the previous post, http://blog.csdn.net/ccm_oliver/article/details/8970774 , for an example of calling avcodec_version() through JNI. [Open questions: why does this build and debug fine inside the hello-jni demo, yet a newly created Android project set up exactly the same way (following the JNI workflow) compiles but cannot be debugged? And more generally, how do you debug mixed Java/JNI code? Not being able to debug is very frustrating...]
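
In the meantime, one low-tech way to debug the native side (short of getting ndk-gdb working) is to scatter Android log calls through the C code and watch logcat. A minimal sketch, where the DBG macro and the tag name are just made up for illustration:

 #include <android/log.h>

 // Hypothetical debug macro; any tag string works, filter on it in logcat.
 #define DBG_TAG "HelloJniDebug"
 #define DBG(...) __android_log_print(ANDROID_LOG_DEBUG, DBG_TAG, __VA_ARGS__)

 /* then, inside any native function: */
 /* DBG("entered %s, videoStream=%d", __func__, videoStream); */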

(2) Modify the C implementation under /jni:

/*
  * This is mostly based off of the FFMPEG tutorial:
  * http://dranger.com/ffmpeg/
  * With a few updates to support Android output mechanisms and to update
  * places where the APIs have shifted.
  */
 #include <jni.h>
 #include <string.h>
 #include <stdio.h>
 #include <android/log.h>
 #include <android/bitmap.h>
 #include <stdlib.h>
 // FFmpeg library headers
 #include <ffmpeg/libavcodec/avcodec.h>
 #include <ffmpeg/libavformat/avformat.h>
 #include <ffmpeg/libswscale/swscale.h>

 //------------- Android log tag definitions ---------------
 #define  LOG_TAG    "FFMPEGSample"
 #define  LOGI(...)  __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
 #define  LOGE(...)  __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

 // Global objects
 AVFormatContext *pFormatCtx;
 AVCodecContext *pCodecCtx;
 AVFrame *pFrame;
 AVFrame *pFrameRGB;
 int videoStream;
 //------------------------------------
 // Static helper: copy one decoded AVFrame into the pixel buffer of an Android Bitmap
 static void fill_bitmap(AndroidBitmapInfo*  info, void *pixels, AVFrame *pFrame)
 {
     uint8_t *frameLine;

     int  yy;
     for (yy = 0; yy < info->height; yy++) {
         uint8_t*  line = (uint8_t*)pixels;
         frameLine = (uint8_t *)pFrame->data[0] + (yy * pFrame->linesize[0]);

         int xx;
         for (xx = 0; xx < info->width; xx++) {
             int out_offset = xx * 4;
             int in_offset = xx * 3;
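             // the RGB24 source has 3 bytes per pixel; the ARGB_8888 bitmap row uses 4 bytes per pixel (the 4th byte is alpha)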

             line[out_offset] = frameLine[in_offset];
             line[out_offset+1] = frameLine[in_offset+1];
             line[out_offset+2] = frameLine[in_offset+2];
             line[out_offset+3] = 0;
         }
         pixels = (char*)pixels + info->stride;
     }
 }
 //-------------------------------------------

 //--------- Implementation of the stringFromJNI native method ------------------
 jstring
 Java_com_example_hellojni_HelloJni_stringFromJNI( JNIEnv* env,
                                                         jobject thiz )
 {
      char str[25];
      sprintf(str, "%d", avcodec_version());
      return (*env)->NewStringUTF(env, str);
 }
 //---------------end--------------------------

 //--- Native function backing the Java openFile() method -----
 void Java_com_example_hellojni_HelloJni_openFile(JNIEnv * env, jobject thiz)
 {
     int ret;
     int err;
     int i;
     AVCodec *pCodec;
     uint8_t *buffer;
     int numBytes;
     // Register all formats and codecs
     av_register_all();
     LOGE("Registered formats");
     // Open the video file from the SD card
     err = avformat_open_input(&pFormatCtx, "file:/sdcard/videos/IOT.avi", NULL, NULL);
     LOGE("Called open file");
     if(err!=0) {
         LOGE("Couldn't open file");
         return;
     }
     LOGE("Opened file");

     if(avformat_find_stream_info(pFormatCtx,NULL)<0) {
         LOGE("Unable to get stream info");
         return;
     }

     videoStream = -1;
     // Find the first video stream
     for (i=0; i<pFormatCtx->nb_streams; i++) {
         if(pFormatCtx->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO) {
             videoStream = i;
             break;
         }
     }
     if(videoStream==-1) {
         LOGE("Unable to find video stream");
         return;
     }

     LOGI("Video stream is [%d]", videoStream);
     // Get the codec context for the video stream
     pCodecCtx=pFormatCtx->streams[videoStream]->codec;
     // Find a decoder for that codec
     pCodec=avcodec_find_decoder(pCodecCtx->codec_id);
     if(pCodec==NULL) {
         LOGE("Unsupported codec");
         return;
     }
     // Open the codec
     if(avcodec_open2(pCodecCtx, pCodec,NULL)<0) {
         LOGE("Unable to open codec");
         return;
     }
     // Allocate the decoded frame
     pFrame=avcodec_alloc_frame();
     // Allocate the RGB frame
     pFrameRGB=avcodec_alloc_frame();
     LOGI("Video size is [%d x %d]", pCodecCtx->width, pCodecCtx->height);
     // Compute the size of the RGB buffer
     numBytes=avpicture_get_size(PIX_FMT_RGB24, pCodecCtx->width, pCodecCtx->height);
     // Allocate the RGB buffer
     buffer=(uint8_t *)av_malloc(numBytes*sizeof(uint8_t));

     avpicture_fill((AVPicture *)pFrameRGB, buffer, PIX_FMT_RGB24,
                             pCodecCtx->width, pCodecCtx->height);
 }
 //-----------------------end--------------------
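 /* Note (not part of the original sample): openFile() never releases what it
  * allocates. A matching teardown could look roughly like the sketch below;
  * closeFile is a hypothetical extra native method, and it assumes
  * avformat_close_input() exists in your FFmpeg build (older builds would use
  * av_close_input_file() instead). */
 void Java_com_example_hellojni_HelloJni_closeFile(JNIEnv * env, jobject thiz)
 {
     if (pFrameRGB) {
         av_free(pFrameRGB->data[0]);    // the buffer handed to avpicture_fill()
         av_free(pFrameRGB);
         pFrameRGB = NULL;
     }
     if (pFrame) {
         av_free(pFrame);
         pFrame = NULL;
     }
     if (pCodecCtx) {
         avcodec_close(pCodecCtx);       // the context itself is owned by pFormatCtx
         pCodecCtx = NULL;
     }
     if (pFormatCtx) {
         avformat_close_input(&pFormatCtx);
     }
 }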

 //---- Native function backing the Java drawFrame() method -----
 void Java_com_example_hellojni_HelloJni_drawFrame(JNIEnv * env, jobject thiz, jobject bitmap)
 {
     AndroidBitmapInfo  info;
     void*              pixels;
     int                ret;

     int err;
     int i;
     int frameFinished = 0;
     AVPacket packet;
     static struct SwsContext *img_convert_ctx;
     int64_t seek_target;

     if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
         LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
         return;
     }
     LOGE("Checked on the bitmap");

     if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
         LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
     }
     LOGE("Grabbed the pixels");

     i = 0;
     while((i==0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
         if(packet.stream_index==videoStream) {
             avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

             if(frameFinished) {
                 LOGE("packet pts %llu", packet.pts);
                 // This is much different than the tutorial, sws_scale
                 // replaces img_convert, but it's not a complete drop in.
                 // This version keeps the image the same size but swaps to
                 // RGB24 format, which works perfect for PPM output.
                 int target_width = 320;
                 int target_height = 240;
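                 // Note: a new SwsContext is created on every call here and is never released with sws_freeContext().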
                 img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                        pCodecCtx->pix_fmt,
                        target_width, target_height, PIX_FMT_RGB24, SWS_BICUBIC,
                        NULL, NULL, NULL);
                 if(img_convert_ctx == NULL) {
                     LOGE("could not initialize conversion context\n");
                     return;
                 }
                 sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                 // save_frame(pFrameRGB, target_width, target_height, i);
                 fill_bitmap(&info, pixels, pFrameRGB);
                 i = 1;
             }
         }
         av_free_packet(&packet);
     }

     AndroidBitmap_unlockPixels(env, bitmap);
 }
 //-----------------------end--------------------------
 // Internal helper used to seek to a target position given in milliseconds
 int seek_frame(int tsms)
 {
     int64_t frame;

     frame = av_rescale(tsms,pFormatCtx->streams[videoStream]->time_base.den,pFormatCtx->streams[videoStream]->time_base.num);
     frame/=1000;

     if(avformat_seek_file(pFormatCtx,videoStream,0,frame,frame,AVSEEK_FLAG_FRAME)<0) {
         return 0;
     }

     avcodec_flush_buffers(pCodecCtx);

     return 1;
 }
 //--------------------------------------
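 /* To make the timestamp math in seek_frame() concrete, here is the same
  * millisecond-to-stream-time-base conversion as a standalone helper with a
  * worked example (sketch only; the 1/90000 time base is just an assumed example).
  * av_rescale() is already declared via the FFmpeg headers included above. */
 // Example: tsms = 5000 ms, time_base = 1/90000
 //   av_rescale(5000, 90000, 1) = 450000000, then /1000 = 450000 ticks.
 static int64_t ms_to_stream_ts(int tsms, AVRational time_base)
 {
     int64_t ts = av_rescale(tsms, time_base.den, time_base.num);
     return ts / 1000;
 }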

 // Native function backing the Java drawFrameAt() method
 void Java_com_example_hellojni_HelloJni_drawFrameAt(JNIEnv * env, jobject thiz, jobject bitmap, jint secs)
 {
     AndroidBitmapInfo  info;
     void*              pixels;
     int                ret;

     int err;
     int i;
     int frameFinished = 0;
     AVPacket packet;
     static struct SwsContext *img_convert_ctx;
     int64_t seek_target;

     if ((ret = AndroidBitmap_getInfo(env, bitmap, &info)) < 0) {
         LOGE("AndroidBitmap_getInfo() failed ! error=%d", ret);
         return;
     }
     LOGE("Checked on the bitmap");

     if ((ret = AndroidBitmap_lockPixels(env, bitmap, &pixels)) < 0) {
         LOGE("AndroidBitmap_lockPixels() failed ! error=%d", ret);
     }
     LOGE("Grabbed the pixels");

     seek_frame(secs * 1000);

     i = 0;
     while ((i== 0) && (av_read_frame(pFormatCtx, &packet)>=0)) {
         if(packet.stream_index==videoStream) {
             avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

             if(frameFinished) {
                 // This is much different than the tutorial, sws_scale
                 // replaces img_convert, but it's not a complete drop in.
                 // This version keeps the image the same size but swaps to
                 // RGB24 format, which works perfect for PPM output.
                 int target_width = 320;
                 int target_height = 240;
                 img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height,
                        pCodecCtx->pix_fmt,
                        target_width, target_height, PIX_FMT_RGB24, SWS_BICUBIC,
                        NULL, NULL, NULL);
                 if(img_convert_ctx == NULL) {
                     LOGE("could not initialize conversion context\n");
                     return;
                 }
                 sws_scale(img_convert_ctx, (const uint8_t* const*)pFrame->data, pFrame->linesize, 0, pCodecCtx->height, pFrameRGB->data, pFrameRGB->linesize);

                 // save_frame(pFrameRGB, target_width, target_height, i);
                 fill_bitmap(&info, pixels, pFrameRGB);
                 i = 1;
             }
         }
         av_free_packet(&packet);
     }

     AndroidBitmap_unlockPixels(env, bitmap);
 }
 //-------------------------end------------------------

PS: the code above has already been updated in many places where the interfaces of older FFmpeg versions used in the original tutorial have since changed.
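
For reference, here is a rough mapping from the calls used in the original dranger tutorial to the newer names that appear in the code above (from memory; the exact FFmpeg release where each old name was dropped varies):

 /* old tutorial API            ->  replacement used above
  * av_open_input_file(...)     ->  avformat_open_input(&pFormatCtx, path, NULL, NULL)
  * av_find_stream_info(ctx)    ->  avformat_find_stream_info(ctx, NULL)
  * avcodec_open(ctx, codec)    ->  avcodec_open2(ctx, codec, NULL)
  * avcodec_decode_video(...)   ->  avcodec_decode_video2(ctx, frame, &got_frame, &pkt)
  * CODEC_TYPE_VIDEO            ->  AVMEDIA_TYPE_VIDEO
  */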

(3) Writing Android.mk:

# local directory of this Android.mk
LOCAL_PATH := $(call my-dir)

 include $(CLEAR_VARS)
 PATH_TO_FFMPEG_SOURCE:=$(LOCAL_PATH)/ffmpeg
 # header include path for the FFmpeg sources
 LOCAL_C_INCLUDES += $(PATH_TO_FFMPEG_SOURCE)
 # libraries to link against (a prebuilt libffmpeg.so plus the Android system libs)
 LOCAL_LDLIBS := -lffmpeg -llog -ljnigraphics -lz -ldl -lgcc
 LOCAL_MODULE    := hello-jni      
 LOCAL_SRC_FILES := hello-jni.c

 include $(BUILD_SHARED_LIBRARY)

(4) Run the local NDK build (ndk-build) over the files above to generate the required shared libraries under libs/, and copy the prebuilt ffmpeg .so library over as well.

(5) The Java implementation:

package com.example.hellojni;

import android.app.Activity;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.TextView;

public class HelloJni extends Activity
{
    private Bitmap mBitmap;
    private int mSecs = 0;

    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);

        TextView tv1 = (TextView) findViewById(R.id.text1);
        tv1.setText("Call avcodec_version from JNI:" + String.valueOf(stringFromJNI()));

        mBitmap = Bitmap.createBitmap(320, 240, Bitmap.Config.ARGB_8888);
        openFile();
        tv1.setText("open file successfully");

        Button btn = (Button) findViewById(R.id.frame_adv);
        btn.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                drawFrame(mBitmap);
                ImageView i = (ImageView) findViewById(R.id.frame);
                i.setImageBitmap(mBitmap);
            }
        });

        Button btn_fwd = (Button) findViewById(R.id.frame_fwd);
        btn_fwd.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                mSecs += 5;
                drawFrameAt(mBitmap, mSecs);
                ImageView i = (ImageView) findViewById(R.id.frame);
                i.setImageBitmap(mBitmap);
            }
        });

        Button btn_back = (Button) findViewById(R.id.frame_back);
        btn_back.setOnClickListener(new OnClickListener() {
            public void onClick(View v) {
                mSecs -= 5;
                drawFrameAt(mBitmap, mSecs);
                ImageView i = (ImageView) findViewById(R.id.frame);
                i.setImageBitmap(mBitmap);
            }
        });
    }

    public native String stringFromJNI();

    public native String unimplementedStringFromJNI();

    public static native void openFile();
    public static native void drawFrame(Bitmap bitmap);
    public static native void drawFrameAt(Bitmap bitmap, int secs);

    static {
        System.loadLibrary("ffmpeg");
        System.loadLibrary("hello-jni");
    }
}

That covers the main UI controls, loading the shared libraries, and declaring the native methods.

(6) Push a test video to the emulator's SD card with adb push, and then run the project in the emulator.

--------------------------------------------Bingo--------------------------------------------------------------------------

=================================

Appendix: the Android JNI development workflow:

1. Create an Android project and write the TestJNI.java class, as follows:


package com.testjni;

import android.app.Activity;
import android.os.Bundle;
import android.util.Log;

public class TestJNI extends Activity
{
    /** Called when the activity is first created. */
    @Override
    public void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        Log.d("Hi", "Android call JNI: " + outputJNI());
    }

    private native String outputJNI();

    static
    {
        System.loadLibrary("testjni");
    }
}

2. Compile the class to produce TestJNI.class.
  Two ways to compile:
  Option 1: click Build Project in the IDE; the corresponding .class files appear under the project's bin directory.
  Option 2: open a command prompt, change to the project directory, and run javac TestJNI.java.


3. Generate com_testjni_TestJNI.h, for example:
  javah -classpath bin\classes -d jni com.testjni.TestJNI
  or
  javah -classpath C:/android/android-sdk/platforms/android-8/android.jar;bin/classes -d jni com.testjni.TestJNI
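
  For reference, the generated com_testjni_TestJNI.h should come out looking roughly like this (the exact boilerplate differs slightly between JDK versions):

  /* DO NOT EDIT THIS FILE - it is machine generated */
  #include <jni.h>
  /* Header for class com_testjni_TestJNI */

  #ifndef _Included_com_testjni_TestJNI
  #define _Included_com_testjni_TestJNI
  #ifdef __cplusplus
  extern "C" {
  #endif
  /*
   * Class:     com_testjni_TestJNI
   * Method:    outputJNI
   * Signature: ()Ljava/lang/String;
   */
  JNIEXPORT jstring JNICALL Java_com_testjni_TestJNI_outputJNI
    (JNIEnv *, jobject);

  #ifdef __cplusplus
  }
  #endif
  #endif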


4. Write testjni.c and place it under $PROJECT/jni/, where $PROJECT is the path of your Android application project.


    #include <jni.h>

    JNIEXPORT jstring JNICALL Java_com_testjni_TestJNI_outputJNI(JNIEnv *env, jobject obj)
    {
        return (*env)->NewStringUTF(env, "Hello World!");
    }

    /* This function is called when the library is first loaded.
     * You can do library initialization here; it returns the JNI version the library supports.
     */
    jint JNI_OnLoad(JavaVM* vm, void* reserved)
    {
        void *venv;

        if ((*vm)->GetEnv(vm, (void**)&venv, JNI_VERSION_1_4) != JNI_OK)
        {
            return -1;
        }
        return JNI_VERSION_1_4;
    }


    5. Write $PROJECT/jni/Android.mk to describe your source files. Its syntax is documented in detail in docs/ANDROID-MK.html. For example:
    # 'my-dir' is a macro provided by the build system; it returns the path of the directory containing this Android.mk and is used to locate source files in the development tree
    LOCAL_PATH := $(call my-dir)


    # CLEAR_VARS is provided by the build system; it points to a GNU Makefile that clears the LOCAL_* variables
    include $(CLEAR_VARS)


    # Name of this module / build target. The build system adds the appropriate prefix and suffix automatically
    LOCAL_MODULE := testjni


    # C and/or C++ source files to compile into the module (no need to list headers or included files)
    LOCAL_SRC_FILES := testjni.c


    # BUILD_SHARED_LIBRARY means build a shared library. It is a variable provided by the build system that points to a GNU Makefile script
    # which collects all the information defined in the LOCAL_* variables since the last 'include $(CLEAR_VARS)' and decides what to build and how to build it.
    # Note: BUILD_STATIC_LIBRARY builds a static library, lib$(LOCAL_MODULE).a; BUILD_EXECUTABLE builds an executable.
    include $(BUILD_SHARED_LIBRARY)


    Other variables:
    LOCAL_C_INCLUDES := $(JNI_H_INCLUDE)  # header include paths; the JNI headers are needed here
    LOCAL_SHARED_LIBRARIES := libutils    # shared libraries this module depends on
    LOCAL_PRELINK_MODULE := false         # whether the module should be prelinked at startup; set to false if prelinking is not needed


    5.1. Optional: describe more details of the project in $PROJECT/jni/Application.mk.
      You do not have to write one from scratch, but it lets you handle multiple CPU ABIs and override compile/link options (see docs/APPLICATION-MK.html for more details).
      This file mainly contains:
      - the exact list of modules your application needs;
      - the CPU architecture(s) to generate machine code for;
      - optional information, such as whether to build release or debug, specific C or C++ compiler flags, and other build options to apply to all modules.


      The file is optional: by default the NDK builds all modules listed in Android.mk for the default CPU ABI (armeabi).
      There are two ways to use an Application.mk:
      - Put it at $PROJECT/jni/Application.mk; it is then picked up automatically by the 'ndk-build' script.
      - Put it at $NDK/apps/<name>/Application.mk, where $NDK is your NDK install path, and then run "make APP=<name>" from the NDK directory.
        This was the approach before NDK r4; it is still supported for compatibility, but the first method is strongly recommended because it is simpler and does not require changing the directory tree under the NDK install path.


    6. Run "$NDK/ndk-build" from your project directory, or any of its subdirectories, to build your native code.
      This launches the NDK build scripts, which automatically probe your development system and application project files to decide what to build. For example:
      ndk-build
      ndk-build clean --> remove the generated binaries
      ndk-build -B V=1 --> force a complete rebuild and show the commands being run