OpenGL is a deep and sprawling subject. For now I have only pieced together code found online to render video, so this post is just a record of what works.

https://www.jianshu.com/p/ec8af2c459c6 — this article explains the approach very well; many thanks to its author.

Without further ado, here is the code:

//
// Created by yuanxuzhen on 6/1/21.
//
#include "yuan_open_gl.h"

#define GET_STR(x) #x

static const char *vertexShader = GET_STR(

        attribute vec4 aPosition; // vertex position, passed in from the host code

        attribute vec2 aTexCoord; // texture coordinate of the vertex

        varying vec2 vTexCoord;   // texture coordinate handed to the fragment shader
        void main() {
            // Flip the texture coordinate: convert from an origin at the top-left
            // to an origin at the bottom-left, e.g. top-left-origin (0,0) maps to
            // bottom-left-origin (0,1).
            vTexCoord = vec2(aTexCoord.x, 1.0 - aTexCoord.y);
            gl_Position = aPosition;
        }
);

static const char *fragYUV420P = GET_STR(

        precision mediump float;    // default float precision

        varying vec2 vTexCoord;     // passed from the vertex shader; OpenGL links varyings by name

        uniform sampler2D yTexture; // input luminance plane (grayscale, one byte per pixel)

        uniform sampler2D uTexture;

        uniform sampler2D vTexture;
        void main() {
            vec3 yuv;
            vec3 rgb;
            yuv.r = texture2D(yTexture, vTexCoord).r; // Y component
            // U and V are stored around a neutral value of 128, so subtract 0.5 here
            // (the GLES shader sees the 0..255 bytes normalized to 0.0..1.0).
            yuv.g = texture2D(uTexture, vTexCoord).r - 0.5; // U component
            yuv.b = texture2D(vTexture, vTexCoord).r - 0.5; // V component
            // Convert YUV to RGB: either apply the per-channel formulas directly,
            // or multiply by a conversion matrix, as done here.
            rgb = mat3(1.0, 1.0, 1.0,
                       0.0, -0.39465, 2.03211,
                       1.13983, -0.58060, 0.0) * yuv;
            // output pixel color
            gl_FragColor = vec4(rgb, 1.0);
        }
);
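// Note (my addition): a GLSL mat3 constructor is column-major, so the matrix
// above applies the widely used BT.601-style formulas. For example, with
// Y = 0.5 and U - 0.5 = V - 0.5 = 0.1:
//     R = Y + 1.13983 * (V - 0.5)                         ≈ 0.614
//     G = Y - 0.39465 * (U - 0.5) - 0.58060 * (V - 0.5)   ≈ 0.402
//     B = Y + 2.03211 * (U - 0.5)                         ≈ 0.703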

GLint InitShader(const char *code, GLint type) {
    // create the shader object
    GLint sh = glCreateShader(type);
    if (sh == 0) {
        LOGE("glCreateShader %d failed!", type);
        return 0;
    }
    // load the shader source
    glShaderSource(sh,
                   1,     // number of source strings
                   &code, // shader source
                   0);    // source lengths (0/NULL = null-terminated)
    // compile the shader
    glCompileShader(sh);

    // query the compile status
    GLint status;
    glGetShaderiv(sh, GL_COMPILE_STATUS, &status);
    if (status == 0) {
        LOGE("glCompileShader failed!");
        return 0;
    }
    LOGE("glCompileShader success!");
    return sh;
}
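
// A small sketch (my addition, not in the original): when compilation fails,
// the driver's info log usually says why. glGetShaderInfoLog is standard
// OpenGL ES 2.0, so a helper like this could be called where status == 0:
static void LogShaderInfo(GLuint sh) {
    GLchar info[512] = {0};
    glGetShaderInfoLog(sh, sizeof(info), NULL, info);
    LOGE("shader info log: %s", info);
}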

static double r2d(AVRational r) {
    return r.num == 0 || r.den == 0 ? 0 : (double) r.num / (double) r.den;
}
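
// (added note) r2d is never called in this file; it converts an AVRational to
// a double, e.g. r2d(stream->time_base) would give seconds per PTS tick,
// which is what frame pacing in the decode loop below would need.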




void play_video(JNIEnv *env, jclass clazz, jstring video_path,
                jobject surface){

    const char *videoPath = (*env)->GetStringUTFChars(env, video_path, 0);
    if (videoPath == NULL) {
        LOGE("videoPath is null");
        return;
    }
    LOGE("PlayVideo: %s", videoPath);

    AVFormatContext *formatContext = avformat_alloc_context();

    // open video file
    LOGI("Open video file");
    if (avformat_open_input(&formatContext, videoPath, NULL, NULL) != 0) {
        LOGE("Cannot open video file: %s\n", videoPath);
        return;
    }

    // Retrieve stream information
    LOGI("Retrieve stream information");
    if (avformat_find_stream_info(formatContext, NULL) < 0) {
        LOGE("Cannot find stream information.");
        return;
    }

    // Find the first video stream
    LOGI("Find video stream");
    int video_stream_index = -1;
    for (int i = 0; i < formatContext->nb_streams; i++) {
        if (formatContext->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            video_stream_index = i;
            break; // take the first video stream, not the last
        }
    }
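
    // Alternative sketch (my addition): FFmpeg can also pick the stream itself
    // via av_find_best_stream, which handles tie-breaking between streams:
    //
    //     int idx = av_find_best_stream(formatContext, AVMEDIA_TYPE_VIDEO,
    //                                   -1, -1, NULL, 0);
    //     if (idx >= 0) video_stream_index = idx;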

    if (video_stream_index == -1) {
        LOGE("No video stream found.");
        return; // no video stream found.
    }

    // Get a pointer to the codec context for the video stream
    LOGI("Get a pointer to the codec context for the video stream");
    AVCodecParameters *codecParameters = formatContext->streams[video_stream_index]->codecpar;

    // Find the decoder for the video stream
    LOGI("Find the decoder for the video stream");
    AVCodec *codec = avcodec_find_decoder(codecParameters->codec_id);
    if (codec == NULL) {
        LOGE("Codec not found.");
        return; // Codec not found
    }

    AVCodecContext *codecContext = avcodec_alloc_context3(codec);

    if (codecContext == NULL) {
        LOGE("CodecContext not found.");
        return; // CodecContext not found
    }

    // fill CodecContext according to CodecParameters
    if (avcodec_parameters_to_context(codecContext, codecParameters) < 0) {
        LOGE("Fill CodecContext failed.");
        return;
    }

    // init codec context
    LOGI("open Codec");
    if (avcodec_open2(codecContext, codec, NULL)) {
        LOGE("Init CodecContext failed.");
        return;
    }

    enum AVPixelFormat dstFormat = AV_PIX_FMT_YUV420P;

    // Allocate av packet
    AVPacket *packet = av_packet_alloc();
    if (packet == NULL) {
        LOGE("Could not allocate av packet.");
        return;
    }

    // Allocate video frame
    LOGI("Allocate video frame");
    AVFrame *frame = av_frame_alloc();
    // Allocate render frame
    LOGI("Allocate render frame");
    AVFrame *renderFrame = av_frame_alloc();

    if (frame == NULL || renderFrame == NULL) {
        LOGE("Could not allocate video frame.");
        return;
    }

    // Determine required buffer size and allocate buffer
    LOGI("Determine required buffer size and allocate buffer");
    int size = av_image_get_buffer_size(dstFormat, codecContext->width, codecContext->height, 1);
    uint8_t *buffer = (uint8_t *) av_malloc(size * sizeof(uint8_t));
    av_image_fill_arrays(renderFrame->data, renderFrame->linesize, buffer, dstFormat,
                         codecContext->width, codecContext->height, 1);

    // init SwsContext
    LOGI("init SwsContext");
    struct SwsContext *swsContext = sws_getContext(codecContext->width,
                                                   codecContext->height,
                                                   codecContext->pix_fmt,
                                                   codecContext->width,
                                                   codecContext->height,
                                                   dstFormat,
                                                   SWS_BILINEAR,
                                                   NULL,
                                                   NULL,
                                                   NULL);
    if (swsContext == NULL) {
        LOGE("Init SwsContext failed.");
        return;
    }

    // get video width and height
    LOGI("get video width , height");
    int videoWidth = codecContext->width;
    int videoHeight = codecContext->height;
    LOGI("VideoSize: [%d,%d]", videoWidth, videoHeight);

    // Declared and initialized before the first goto so the __ERROR cleanup
    // below always sees well-defined values, even on an early EGL failure.
    ANativeWindow *nwin = NULL;
    EGLSurface winsurface = EGL_NO_SURFACE;
    EGLContext context = EGL_NO_CONTEXT;
    unsigned char *buf[3] = {NULL, NULL, NULL};

    EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
    if (display == EGL_NO_DISPLAY) {
        LOGE("eglGetDisplay failed");
        goto __ERROR;
    }

    if (EGL_TRUE != eglInitialize(display, 0, 0)) {
        LOGE("eglInitialize failed");
        goto __ERROR;
    }

    EGLConfig config;
    EGLint config_num;
    EGLint config_spec[] = {
            EGL_RED_SIZE, 8,
            EGL_GREEN_SIZE, 8,
            EGL_BLUE_SIZE, 8,
            EGL_SURFACE_TYPE, EGL_WINDOW_BIT, EGL_NONE
    };
    if (EGL_TRUE != eglChooseConfig(display, config_spec, &config, 1, &config_num)) {
        LOGE("eglChooseConfig failed!");
        goto __ERROR;
    }
    nwin = ANativeWindow_fromSurface(env, surface);

    // create the EGL window surface
    winsurface = eglCreateWindowSurface(display, config, nwin, 0);
    if (winsurface == EGL_NO_SURFACE) {
        LOGE("eglCreateWindowSurface failed!");
        goto __ERROR;
    }

    const EGLint ctxAttr[] = {
            EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE
    };
    context = eglCreateContext(display, config, EGL_NO_CONTEXT, ctxAttr);
    if (context == EGL_NO_CONTEXT) {
        LOGE("eglCreateContext failed!");
        goto __ERROR;
    }
    if (EGL_TRUE != eglMakeCurrent(display, winsurface, winsurface, context)) {
        LOGE("eglMakeCurrent failed!");
        goto __ERROR;
    }




    // initialize the vertex and fragment shaders
    // vertex shader
    GLint vsh = InitShader(vertexShader, GL_VERTEX_SHADER);
    // YUV420P fragment shader
    GLint fsh = InitShader(fragYUV420P, GL_FRAGMENT_SHADER);

    // create the rendering program
    GLint program = glCreateProgram();
    if (program == 0) {
        LOGE("glCreateProgram failed!");
        goto __ERROR;
    }
    // attach the shaders to the program
    glAttachShader(program, vsh);
    glAttachShader(program, fsh);

    // link the program
    glLinkProgram(program);
    GLint status = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    if (status != GL_TRUE) {
        LOGE("glLinkProgram failed!");
        goto __ERROR;
    }
    glUseProgram(program);
    LOGE("glLinkProgram success!");


    // vertex data: two triangles forming a full-screen quad
    static float vers[] = {
            1.0f, -1.0f, 0.0f,
            -1.0f, -1.0f, 0.0f,
            1.0f, 1.0f, 0.0f,
            -1.0f, 1.0f, 0.0f,
    };
    GLuint apos = (GLuint) glGetAttribLocation(program, "aPosition");
    glEnableVertexAttribArray(apos);
    // upload the vertex positions
    glVertexAttribPointer(apos, 3, GL_FLOAT, GL_FALSE, 12, vers);
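    // (added note) the stride 12 is 3 floats * 4 bytes per vertex; 0 would
    // also work here, since the array is tightly packed.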

    // texture coordinate data
    static float txts[] = {
            1.0f, 0.0f, // bottom-right
            0.0f, 0.0f,
            1.0f, 1.0f,
            0.0, 1.0
    };
    GLuint atex = (GLuint) glGetAttribLocation(program, "aTexCoord");
    glEnableVertexAttribArray(atex);
    glVertexAttribPointer(atex, 2, GL_FLOAT, GL_FALSE, 8, txts);

    // texture initialization
    // bind each sampler uniform to a texture unit
    glUniform1i(glGetUniformLocation(program, "yTexture"), 0); // texture unit 0
    glUniform1i(glGetUniformLocation(program, "uTexture"), 1); // texture unit 1
    glUniform1i(glGetUniformLocation(program, "vTexture"), 2); // texture unit 2

    // create the OpenGL textures
    GLuint texts[3] = {0};
    // three textures: Y, U, V
    glGenTextures(3, texts);
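
    // Added call (my addition, not in the original): GLES defaults to a 4-byte
    // row alignment when unpacking pixel data, so single-byte luminance rows
    // whose width is not a multiple of 4 would upload skewed without this:
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);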

    // texture parameters for the Y plane
    glBindTexture(GL_TEXTURE_2D, texts[0]);
    // minification and magnification filters
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    // allocate the texture storage
    glTexImage2D(GL_TEXTURE_2D,
                 0,            // mip level, 0 = base
                 GL_LUMINANCE, // GPU internal format: single-channel grayscale
                 videoWidth, videoHeight, // Y plane is full resolution
                 0,            // border
                 GL_LUMINANCE, // pixel data format, must match the internal format
                 GL_UNSIGNED_BYTE, // pixel data type
                 NULL          // no data yet; uploaded per frame below
    );

    // texture parameters for the U plane
    glBindTexture(GL_TEXTURE_2D, texts[1]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE,
                 videoWidth / 2, videoHeight / 2, // chroma planes are quarter size
                 0,
                 GL_LUMINANCE,
                 GL_UNSIGNED_BYTE,
                 NULL
    );

    // texture parameters for the V plane
    glBindTexture(GL_TEXTURE_2D, texts[2]);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    glTexImage2D(GL_TEXTURE_2D,
                 0,
                 GL_LUMINANCE,
                 videoWidth / 2, videoHeight / 2,
                 0,
                 GL_LUMINANCE,
                 GL_UNSIGNED_BYTE,
                 NULL
    );


    // per-frame CPU buffers used to update the textures
    buf[0] = malloc(videoWidth * videoHeight);        // Y plane
    buf[1] = malloc(videoWidth * videoHeight / 4);    // U plane
    buf[2] = malloc(videoWidth * videoHeight / 4);    // V plane
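
    // (added note) YUV420P halves the chroma resolution in both directions:
    // for a 1920x1080 frame, Y = 1920*1080 = 2,073,600 bytes while
    // U = V = 960*540 = 518,400 bytes, i.e. width*height/4 each.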


    LOGI("read frame");
    while (av_read_frame(formatContext, packet) == 0) {
        // Is this a packet from the video stream?
        if (packet->stream_index == video_stream_index) {

            // Send compressed data to the decoder
            int sendPacketState = avcodec_send_packet(codecContext, packet);
            if (sendPacketState == 0) {
                LOGE("packet sent to decoder");

                int receiveFrameState = avcodec_receive_frame(codecContext, frame);
                if (receiveFrameState == 0) {
                    LOGE("frame received from decoder");

                    // convert the decoded frame to YUV420P in renderFrame
                    sws_scale(swsContext, (uint8_t const *const *) frame->data,
                              frame->linesize, 0, codecContext->height,
                              renderFrame->data, renderFrame->linesize);

                    // Y plane
                    memcpy(buf[0], renderFrame->data[0], videoWidth * videoHeight);
                    // U plane
                    memcpy(buf[1], renderFrame->data[1], videoWidth * videoHeight / 4);
                    // V plane
                    memcpy(buf[2], renderFrame->data[2], videoWidth * videoHeight / 4);

                    // activate texture unit 0 and bind the Y texture to it
                    glActiveTexture(GL_TEXTURE0);
                    glBindTexture(GL_TEXTURE_2D, texts[0]);
                    // replace the texture contents with this frame's Y plane
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, videoWidth, videoHeight, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[0]);

                    // activate texture unit 1 and bind the U texture to it
                    glActiveTexture(GL_TEXTURE0 + 1);
                    glBindTexture(GL_TEXTURE_2D, texts[1]);
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, videoWidth / 2, videoHeight / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[1]);

                    // activate texture unit 2 and bind the V texture to it
                    glActiveTexture(GL_TEXTURE0 + 2);
                    glBindTexture(GL_TEXTURE_2D, texts[2]);
                    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, videoWidth / 2, videoHeight / 2, GL_LUMINANCE, GL_UNSIGNED_BYTE, buf[2]);

                    // draw the two-triangle strip
                    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
                    // present to the window
                    eglSwapBuffers(display, winsurface);
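
                    // (added note) this loop renders as fast as the file
                    // decodes; a real player would pace frames, e.g. compute
                    // the presentation time as frame->pts *
                    // r2d(formatContext->streams[video_stream_index]->time_base)
                    // and sleep until it is reached before presenting.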
                } else if (receiveFrameState == AVERROR(EAGAIN)) {
                    LOGE("receive_frame failed: AVERROR(EAGAIN)");
                } else if (receiveFrameState == AVERROR_EOF) {
                    LOGE("receive_frame failed: AVERROR_EOF");
                } else if (receiveFrameState == AVERROR(EINVAL)) {
                    LOGE("receive_frame failed: AVERROR(EINVAL)");
                } else {
                    LOGE("receive_frame failed: unknown error");
                }
            } else if (sendPacketState == AVERROR(EAGAIN)) { // decoder input is full; frames must be received first
                LOGE("send_packet failed: AVERROR(EAGAIN)");
            } else if (sendPacketState == AVERROR_EOF) { // decoder has been flushed and accepts no more packets
                LOGE("send_packet failed: AVERROR_EOF");
            } else if (sendPacketState == AVERROR(EINVAL)) { // decoder not opened, is an encoder, or needs a flush
                LOGE("send_packet failed: AVERROR(EINVAL)");
            } else if (sendPacketState == AVERROR(ENOMEM)) { // packet could not be queued; possibly a decoder error
                LOGE("send_packet failed: AVERROR(ENOMEM)");
            } else {
                LOGE("send_packet failed: unknown error");
            }

        }
        av_packet_unref(packet);
    }
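
    // Added sketch (my addition, not in the original): when av_read_frame hits
    // EOF the decoder may still hold buffered frames. The standard FFmpeg
    // drain sequence sends a NULL packet and receives until AVERROR_EOF:
    //
    //     avcodec_send_packet(codecContext, NULL);
    //     while (avcodec_receive_frame(codecContext, frame) == 0) {
    //         // convert, upload, and draw the frame as above
    //     }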

    __ERROR:
    // release resources
    LOGI("release memory");
    if (nwin != NULL) {
        ANativeWindow_release(nwin);
    }
    if (winsurface != EGL_NO_SURFACE) {
        eglDestroySurface(display, winsurface);
    }
    if (context != EGL_NO_CONTEXT) {
        eglDestroyContext(display, context);
    }

    free(buf[0]);
    free(buf[1]);
    free(buf[2]);
    av_free(buffer);
    sws_freeContext(swsContext);
    av_frame_free(&frame);
    av_frame_free(&renderFrame);
    av_packet_free(&packet);
    avcodec_close(codecContext);
    avcodec_free_context(&codecContext);
    avformat_close_input(&formatContext);
    avformat_free_context(formatContext);
    (*env)->ReleaseStringUTFChars(env, video_path, videoPath);

}


The full source code is on Gitee:

https://gitee.com/creat151/ffmpeg-android.git