//Initialize the SwrContext
swr_init(swr_cxt);
//Resampling options ----------------------------------------------------end
//Get the number of output channels
int out_channel_nb = av_get_channel_layout_nb_channels(out_ch_layout);
jclass clazz = env->GetObjectClass(instance);
//Look up the MethodIDs of the Java callback methods
jmethodID methodId = env->GetMethodID(clazz,"createTrack","(II)V");
jmethodID methodID1 = env->GetMethodID(clazz,"playTrack","([BI)V");
//Call the Java method createTrack(sampleRate, channels) through methodId
env->CallVoidMethod(instance,methodId,44100,out_channel_nb);
//Buffer that holds the resampled PCM data
uint8_t *out_buf = (uint8_t*)av_malloc(2*44100);
int got_frame, frame_count = 0;
//6. Read the compressed audio data packet by packet (AVPacket)
int ret;
while(av_read_frame(pFormatContext,avp) >= 0){
    if(avp->stream_index == audio_index){
        //Decode from AVPacket into AVFrame
        ret = avcodec_decode_audio4(pCodecContext,avf,&got_frame,avp);
        // ret < 0 indicates a decoding error
        if(ret < 0){
            av_log(NULL,AV_LOG_INFO,"decode error \n");
        }
        // got_frame != 0 means a decoded frame is available
        if(got_frame != 0){
            LOGE("Decoding frame %d \n",++frame_count);
            swr_convert(swr_cxt, &out_buf, 2 * 44100, (const uint8_t **)avf->data, avf->nb_samples);
            //Get the buffer size of the converted samples
            int out_buf_size = av_samples_get_buffer_size(NULL,out_channel_nb,avf->nb_samples,out_sample_fmt,1);
            jbyteArray audioArray = env->NewByteArray(out_buf_size);
            env->SetByteArrayRegion(audioArray,0,out_buf_size,(const jbyte*)out_buf);
            //Call the Java method to hand the decoded data (audioArray) to AudioTrack for playback
            env->CallVoidMethod(instance,methodID1,audioArray,out_buf_size);
            env->DeleteLocalRef(audioArray);
        }
    }
    av_packet_unref(avp);
}
av_frame_free(&avf);
swr_free(&swr_cxt);
avcodec_close(pCodecContext);
avformat_close_input(&pFormatContext);
env->ReleaseStringUTFChars(audioPath,path);
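
The listing above begins right after the resampler options have been configured; that setup is not part of the excerpt. As a rough sketch of what it usually looks like with the same names used above (swr_cxt, out_ch_layout, out_sample_fmt, pCodecContext), assuming 16-bit stereo output at 44100 Hz:

//Assumed sketch of the resampler setup that precedes the listing above
SwrContext *swr_cxt = swr_alloc();
enum AVSampleFormat out_sample_fmt = AV_SAMPLE_FMT_S16;  //16-bit PCM, matching ENCODING_PCM_16BIT on the Java side
uint64_t out_ch_layout = AV_CH_LAYOUT_STEREO;            //stereo output
int out_sample_rate = 44100;                             //matches the 44100 passed to createTrack()
swr_cxt = swr_alloc_set_opts(swr_cxt,
                             out_ch_layout, out_sample_fmt, out_sample_rate, //output layout/format/rate
                             pCodecContext->channel_layout,                  //input parameters taken from the decoder
                             pCodecContext->sample_fmt,
                             pCodecContext->sample_rate,
                             0, NULL);

After this, the listing above calls swr_init(swr_cxt) and starts reading packets.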


Below is the Kotlin player code that calls into the JNI layer:
open class ThirdActivity : AppCompatActivity() {
    private val inputFilePath = "/storage/emulated/0/GreenCheng/video/g4.mp4"
    private var audioTrack: AudioTrack? = null
    private lateinit var musicPlayer: MusicPlayer

    override fun onCreate(savedInstanceState: Bundle?) {
        super.onCreate(savedInstanceState)
        setContentView(R.layout.activity_third)
        musicPlayer = MusicPlayer(inputFilePath)
    }

    fun play(view: View) {
        surface_view.startPlay(inputFilePath)
    }

    fun playAudio(view: View) {
        Thread(Runnable {
            Log.d("ThirdActivity", "------>> calling the native method")
            playAudio(inputFilePath)
        }).start()
        // musicPlayer.playAudio()
    }

    fun stopAudio(view: View) {
        musicPlayer.stopAudio()
    }

    /**
     * Initializes the AudioTrack; this method is called from the C code.
     */
    fun createTrack(sampleRateInHz: Int, nbChannel: Int) {
        val channelConfig = when (nbChannel) {
            1 -> AudioFormat.CHANNEL_OUT_MONO
            2 -> AudioFormat.CHANNEL_OUT_STEREO
            else -> AudioFormat.CHANNEL_OUT_MONO
        }
        val bufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
        audioTrack = AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM)
        audioTrack?.play()
    }

    /**
     * Called from the C code: whenever a chunk of PCM has been decoded, it is handed to AudioTrack for playback.
     */
    fun playTrack(buffer: ByteArray, length: Int) {
        if (audioTrack != null && audioTrack?.playState == AudioTrack.PLAYSTATE_PLAYING) {
            audioTrack?.write(buffer, 0, length)
        }
    }

    private external fun playAudio(path: String)

    companion object {
        init {
            System.loadLibrary("native-lib")
        }
    }
}
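
On the native side, the external fun playAudio above resolves to a JNI function whose name includes the app's package name, which is not shown in this excerpt; the package com.example.player below is only a placeholder. A minimal sketch of that entry point, matching the audioPath/path names used in the first listing:

extern "C" JNIEXPORT void JNICALL
Java_com_example_player_ThirdActivity_playAudio(JNIEnv *env, jobject instance, jstring audioPath) {
    //Convert the Java string to a C string; the first listing releases it at the end
    //with env->ReleaseStringUTFChars(audioPath, path)
    const char *path = env->GetStringUTFChars(audioPath, NULL);
    //... open the file with FFmpeg, decode, resample, and call back
    //createTrack()/playTrack() as shown in the first listing ...
    env->ReleaseStringUTFChars(audioPath, path);
}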


Code for audio playback with OpenSL ES:
JNI code:
SLObjectItf engineObject = NULL;                              //engine object, declared with SLObjectItf
SLEngineItf engineEngine = NULL;                              //the concrete engine interface
SLObjectItf outputMixObject = NULL;                           //output mix object, declared with SLObjectItf
SLEnvironmentalReverbItf outputMixEnvironmentalReverb = NULL; //the concrete environmental reverb interface of the mix
SLEnvironmentalReverbSettings settings = SL_I3DL2_ENVIRONMENT_PRESET_DEFAULT; //default reverb preset
SLObjectItf audioplayer = NULL;                               //player object, declared with SLObjectItf
SLPlayItf slPlayItf = NULL;                                   //play interface
SLAndroidSimpleBufferQueueItf slBufferQueueItf = NULL;        //buffer queue interface
size_t buffersize = 0;
void *buffer;
//Enqueue the next chunk of PCM data into the buffer queue
void getQueueCallBack(SLAndroidSimpleBufferQueueItf slBufferQueueItf, void* context){
    buffersize = 0;
    getPcm(&buffer, &buffersize);
    if (buffer != NULL && buffersize != 0) {
        //Add the decoded data to the queue
        (*slBufferQueueItf)->Enqueue(slBufferQueueItf, buffer, buffersize);
    }
}
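
getPcm() is only referenced here, not defined in this excerpt. Conceptually it decodes one audio AVPacket with the decoder opened in createFFmpeg() and hands back the resampled PCM; a rough sketch, assuming it uses the FFmpeg globals declared further down (pFormatCtx, pCodecCtx, packet, frame, swrContext, out_buffer, out_channer_nb, audio_stream_idx):

void getPcm(void **pcm, size_t *pcm_size) {
    *pcm_size = 0;
    int got_frame = 0;
    //Read packets until one audio frame has been decoded
    while (av_read_frame(pFormatCtx, packet) >= 0) {
        if (packet->stream_index == audio_stream_idx) {
            avcodec_decode_audio4(pCodecCtx, frame, &got_frame, packet);
            if (got_frame) {
                //Resample into the global out_buffer and report its size to the caller
                swr_convert(swrContext, &out_buffer, 44100 * 2,
                            (const uint8_t **)frame->data, frame->nb_samples);
                int size = av_samples_get_buffer_size(NULL, out_channer_nb,
                                                      frame->nb_samples, AV_SAMPLE_FMT_S16, 1);
                *pcm = out_buffer;
                *pcm_size = (size_t)size;
                av_packet_unref(packet);
                return;
            }
        }
        av_packet_unref(packet);
    }
}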


//Create the engine
void createEngine(){
    slCreateEngine(&engineObject, 0, NULL, 0, NULL, NULL);                      //create the engine object
    (*engineObject)->Realize(engineObject, SL_BOOLEAN_FALSE);                   //realize the engine object
    (*engineObject)->GetInterface(engineObject, SL_IID_ENGINE, &engineEngine);  //get the SLEngineItf interface
}

//Create the output mix
void createMixVolume(){
    (*engineEngine)->CreateOutputMix(engineEngine, &outputMixObject, 0, 0, 0);  //create the output mix object through the engine
    (*outputMixObject)->Realize(outputMixObject, SL_BOOLEAN_FALSE);             //realize the output mix object
    SLresult sLresult = (*outputMixObject)->GetInterface(outputMixObject, SL_IID_ENVIRONMENTALREVERB, &outputMixEnvironmentalReverb); //get the environmental reverb interface
    //Apply the reverb settings if the interface is available
    if (SL_RESULT_SUCCESS == sLresult) {
        (*outputMixEnvironmentalReverb)->SetEnvironmentalReverbProperties(outputMixEnvironmentalReverb, &settings);
    }
}

//Create the player
void createPlayer(const char* path){
    //Initialize FFmpeg and query the sample rate / channel count of the source
    int rate;
    int channels;
    createFFmpeg(&rate, &channels, path);
    LOGE("RATE %d", rate);
    LOGE("channels %d", channels);
    /*
     * typedef struct SLDataLocator_AndroidBufferQueue_ {
     *     SLuint32 locatorType; //locator type (buffer queue)
     *     SLuint32 numBuffers;  //number of buffers
     * };
     */
    SLDataLocator_AndroidBufferQueue android_queue = {SL_DATALOCATOR_ANDROIDSIMPLEBUFFERQUEUE, 2};
    /*
     * typedef struct SLDataFormat_PCM_ {
     *     SLuint32 formatType;    //PCM
     *     SLuint32 numChannels;   //number of channels
     *     SLuint32 samplesPerSec; //sample rate, in milliHertz
     *     SLuint32 bitsPerSample; //bits per sample
     *     SLuint32 containerSize; //container size
     *     SLuint32 channelMask;   //speaker mask (stereo here)
     *     SLuint32 endianness;    //byte order
     * } SLDataFormat_PCM;
     */
    SLDataFormat_PCM pcm = {SL_DATAFORMAT_PCM, (SLuint32)channels, (SLuint32)rate*1000,
                            SL_PCMSAMPLEFORMAT_FIXED_16, SL_PCMSAMPLEFORMAT_FIXED_16,
                            SL_SPEAKER_FRONT_LEFT|SL_SPEAKER_FRONT_RIGHT, SL_BYTEORDER_LITTLEENDIAN};
    /*
     * typedef struct SLDataSource_ {
     *     void *pLocator; //the buffer queue locator
     *     void *pFormat;  //the data format / configuration
     * } SLDataSource;
     */
    SLDataSource dataSource = {&android_queue, &pcm};

    SLDataLocator_OutputMix slDataLocator_outputMix = {SL_DATALOCATOR_OUTPUTMIX, outputMixObject};
    SLDataSink slDataSink = {&slDataLocator_outputMix, NULL};

    const SLInterfaceID ids[3] = {SL_IID_BUFFERQUEUE, SL_IID_EFFECTSEND, SL_IID_VOLUME};
    const SLboolean req[3] = {SL_BOOLEAN_FALSE, SL_BOOLEAN_FALSE, SL_BOOLEAN_FALSE};

    /*
     * SLresult (*CreateAudioPlayer) (
     *     SLEngineItf self,
     *     SLObjectItf * pPlayer,
     *     SLDataSource *pAudioSrc,   //data source
     *     SLDataSink *pAudioSnk,     //data sink (the output mix)
     *     SLuint32 numInterfaces,
     *     const SLInterfaceID * pInterfaceIds,
     *     const SLboolean * pInterfaceRequired
     * );
     */
    LOGE("Reached checkpoint 1");
    (*engineEngine)->CreateAudioPlayer(engineEngine, &audioplayer, &dataSource, &slDataSink, 3, ids, req);
    (*audioplayer)->Realize(audioplayer, SL_BOOLEAN_FALSE);
    LOGE("Reached checkpoint 2");
    (*audioplayer)->GetInterface(audioplayer, SL_IID_PLAY, &slPlayItf);  //get the play interface
    //Get the buffer queue interface; playback is fed through this queue
    (*audioplayer)->GetInterface(audioplayer, SL_IID_BUFFERQUEUE, &slBufferQueueItf);
    //Register the callback that refills the queue
    (*slBufferQueueItf)->RegisterCallback(slBufferQueueItf, getQueueCallBack, NULL);
    //Set the player to the playing state
    (*slPlayItf)->SetPlayState(slPlayItf, SL_PLAYSTATE_PLAYING);
    //Kick off playback by enqueueing the first buffer
    getQueueCallBack(slBufferQueueItf, NULL);
}
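
The excerpt does not show the JNI entry point that ties these pieces together; the function name below (playByOpenSL, under a placeholder package) is assumed, but the call order it shows is the one implied by the code above:

extern "C" JNIEXPORT void JNICALL
Java_com_example_player_ThirdActivity_playByOpenSL(JNIEnv *env, jobject instance, jstring audioPath) {
    const char *path = env->GetStringUTFChars(audioPath, NULL);
    createEngine();      //engine object + SLEngineItf
    createMixVolume();   //output mix (+ optional environmental reverb)
    createPlayer(path);  //player bound to the buffer queue; enqueues the first buffer and starts playback
    env->ReleaseStringUTFChars(audioPath, path);
}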
//Release resources
void releaseResource(){
    if (audioplayer != NULL) {
        (*audioplayer)->Destroy(audioplayer);
        audioplayer = NULL;
        slBufferQueueItf = NULL;
        slPlayItf = NULL;
    }
    if (outputMixObject != NULL) {
        (*outputMixObject)->Destroy(outputMixObject);
        outputMixObject = NULL;
        outputMixEnvironmentalReverb = NULL;
    }
    if (engineObject != NULL) {
        (*engineObject)->Destroy(engineObject);
        engineObject = NULL;
        engineEngine = NULL;
    }
    releaseFFmpeg();
}

AVFormatContext *pFormatCtx;
AVCodecContext *pCodecCtx;
AVCodec *pCodex;
AVPacket *packet;
AVFrame *frame;
SwrContext *swrContext;
uint8_t *out_buffer;
int out_channer_nb;
int audio_stream_idx = -1;

//Called from the OpenSL ES code; fills in the sample rate and channel count via int *rate, int *channel
int createFFmpeg(int *rate, int *channel, const char* path){
    av_register_all();
    const char *input = path;
    pFormatCtx = avformat_alloc_context();
    LOGE("input path %s", input);
    LOGE("pFormatCtx %p", pFormatCtx);
    int error;
    char buf[1024] = "";
    //Open the input and read its header (demuxing)
    if ((error = avformat_open_input(&pFormatCtx, input, NULL, NULL)) < 0) {
        av_strerror(error, buf, 1024);
        // LOGE("%s", inputPath)
        LOGE("Couldn't open file %s: %d(%s)", input, error, buf);
        // LOGE("%d", error)
        LOGE("Failed to open the input");
        return -1;
    }
    //3. Retrieve the stream information
    if (avformat_find_stream_info(pFormatCtx, NULL) < 0) {
        LOGE("%s", "Failed to retrieve stream info");
        return -1;
    }
for (int i = 0; i < pFormatCtx->nb_streams; ++i) {
    if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
        LOGE("Found audio stream, codec_type %d", pFormatCtx->streams[i]->codec->codec_type);
        audio_stream_idx=i;
        break;
    }
}