Overview

The previous article, 《Android音视频录制概述》 (an overview of Android audio/video recording), already outlined how Surface-based recording works. If you haven't read it yet, please go through it first.

A follow-up article covers Android音视频录制(2)——buffer录制 (buffer-based recording).

Also see: Android全关键帧视频录制——视频编辑必备 (all-key-frame video recording, essential for video editing).

Surface-based video recording breaks down into the following parts:
1. Camera preview
Including A: camera configuration; B: OpenGL drawing based on GLSurfaceView

2. Encoding the data
Including A: encoder configuration; B: EGL setup; C: OpenGL drawing onto the encoder's Surface

3. Muxing the video data (writing it to the file)

Audio recording breaks down into the following parts:
1. The audio capture thread
2. Audio encoding
3. Muxing the audio data (writing it to the file)

In the code walkthrough I only go over the key code; I'll push the full project to git later, so please download it and read the rest yourself. I'm in the habit of putting the explanations in the corresponding code comments, so reading the comments is usually enough to follow the logic.

Video Recording

Camera preview
Overall flow: when the Activity starts, the GLSurfaceView is created. In its surface-created callback we set up the OpenGL environment (load the vertex and fragment shaders) and obtain the texture to render into (a SurfaceTexture), bind that texture to the GLSurfaceView, and create the OpenGL drawer. Finally the camera is initialized, bound to the texture, and preview is started. The core of this flow is wiring the Camera, the OpenGL texture (SurfaceTexture) and the GLSurfaceView together; there are plenty of details along the way, and the code comments should make them clear.
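Sketched below is a condensed view of that chain; camera and renderer stand for the Camera instance and the SurfaceRenderer created in the classes that follow, and error handling is omitted:

// Condensed sketch of the preview chain (see the full classes below for the real code)
int textureId = GLDrawer2D.initTextureId();          // create an OES texture on the GL thread
SurfaceTexture st = new SurfaceTexture(textureId);   // the texture the camera will render into
st.setOnFrameAvailableListener(renderer);            // renderer marks "texture needs update" per frame
camera.setPreviewTexture(st);                        // Camera -> SurfaceTexture binding
camera.startPreview();                               // frames now arrive via updateTexImage() in onDrawFrame()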

package lda.com.camerasurfacerecorder;

import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.WindowManager;

import java.io.IOException;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;

/**
 * 继承GLSurfaceView
 */

public class CameraGLSurfaceView extends GLSurfaceView {
    private static final int CAMERA_ID = 0;
    private Context mContext;
    private static final String TAG = CameraGLSurfaceView.class.getSimpleName();
    private SurfaceRenderer mRenderer;//OpenGL渲染器
    private Camera mCamera;
    private int mRotation;
    private boolean mIsFrontFace;
    private int mVideoWidth = Config.VIDEO_WIDTH, mVideoHeight = Config.VIDEO_HEIGHT;

    public CameraGLSurfaceView(Context context) {
        super(context);
        init(context);
    }

    public CameraGLSurfaceView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init(context);
    }

    @TargetApi(8)
    private void init(Context context) {
        mContext = context;
        mRenderer = new SurfaceRenderer(this);
        // GLES 2.0, API >= 8
        setEGLContextClientVersion(2);
        setRenderer(mRenderer);
/*      // 设置RENDERMODE_WHEN_DIRTY可以减少性能消耗
        setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY); */
    }

    /**
     * @param holder
     */
    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        stopPreview();
        mRenderer.onSurfaceDestroy();
        super.surfaceDestroyed(holder);
    }

    public void startPreview(int width, int height){
        width = Config.VIDEO_HEIGHT;
        height = Config.VIDEO_WIDTH;
        initCamera(width, height);
        if(mCamera == null){
            return;
        }
        try {
            final Camera.Size previewSize = mCamera.getParameters().getPreviewSize();
            Log.i(TAG, String.format("previewSize(%d, %d)", previewSize.width, previewSize.height));
            setVideoSize(previewSize.width, previewSize.height);
            final SurfaceTexture st = mRenderer.getSurfaceTexture();
            st.setDefaultBufferSize(previewSize.width, previewSize.height);
            mCamera.setPreviewTexture(st);//相机和opengl纹理绑定
            if (mCamera != null) {
                //开启摄像头预览
                mCamera.startPreview();
            }
        }catch (Exception e){
            Log.e(TAG, "startPreview:", e);
            if (mCamera != null) {
                mCamera.release();
                mCamera = null;
            }
        }

    }

    /**
     * 初始化相机
     * @param width
     * @param height
     */
    private void initCamera(int width, int height) {
        Log.d(TAG, "initCamera:");
        if (mCamera == null) {
            try {
                mCamera = Camera.open(CAMERA_ID);
                final Camera.Parameters params = mCamera.getParameters();
                final List<String> focusModes = params.getSupportedFocusModes();
                if (focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
                } else if(focusModes
                        .contains(Camera.Parameters.FOCUS_MODE_AUTO)) {
                    params.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                } else {
                    Log.i(TAG, "Camera does not support autofocus");
                }
                final List<int[]> supportedFpsRange = params.getSupportedPreviewFpsRange();
                final int[] max_fps = supportedFpsRange.get(supportedFpsRange.size() - 1);
                params.setPreviewFpsRange(max_fps[0], max_fps[1]);
                params.setRecordingHint(true);
                final Camera.Size closestSize = getClosestSupportedSize(params.getSupportedPreviewSizes(), width, height);
                params.setPreviewSize(closestSize.width, closestSize.height);
                final Camera.Size pictureSize = getClosestSupportedSize(params.getSupportedPictureSizes(), width, height);
                params.setPictureSize(pictureSize.width, pictureSize.height);
                //调整相机角度
                setRotation(params);
                mCamera.setParameters(params);
            } catch (Exception e) {
                Log.e(TAG, "initCamera:", e);
                if (mCamera != null) {
                    mCamera.release();
                    mCamera = null;
                }
            }
        }
    }

    public void stopPreview(){
        Log.v(TAG, "stopPreview:");
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }


    public void setVideoSize(final int width, final int height) {
        if ((mRotation % 180) == 0) {
            mVideoWidth = width;
            mVideoHeight = height;
        } else {
            mVideoWidth = height;
            mVideoHeight = width;
        }
        //调整OpenGL视口
        queueEvent(new Runnable() {
            @Override
            public void run() {
                mRenderer.updateViewport();
            }
        });
        Log.d(TAG, "setVideoSize: width x height=" + width + " x " + height );
    }

    private static Camera.Size getClosestSupportedSize(List<Camera.Size> supportedSizes, final int requestedWidth, final int requestedHeight) {
        return (Camera.Size) Collections.min(supportedSizes, new Comparator<Camera.Size>() {

            private int diff(final Camera.Size size) {
                return Math.abs(requestedWidth - size.width) + Math.abs(requestedHeight - size.height);
            }

            @Override
            public int compare(final Camera.Size lhs, final Camera.Size rhs) {
                return diff(lhs) - diff(rhs);
            }
        });
    }

    /**
     * 设置摄像头角度
     * @param params
     */
    private final void setRotation(final Camera.Parameters params) {
        final Display display = ((WindowManager)mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
        final int rotation = display.getRotation();
        int degrees = 0;
        switch (rotation) {
            case Surface.ROTATION_0: degrees = 0; break;
            case Surface.ROTATION_90: degrees = 90; break;
            case Surface.ROTATION_180: degrees = 180; break;
            case Surface.ROTATION_270: degrees = 270; break;
        }
        final Camera.CameraInfo info = new android.hardware.Camera.CameraInfo();
        android.hardware.Camera.getCameraInfo(CAMERA_ID, info);
        mIsFrontFace = (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT);
        if (mIsFrontFace) { // 前置摄像头
            degrees = (info.orientation + degrees) % 360;
            degrees = (360 - degrees) % 360;  // reverse
        } else {  // 后置摄像头
            degrees = (info.orientation - degrees + 360) % 360;
        }
        mCamera.setDisplayOrientation(degrees);
        mRotation = degrees;
        Log.d(TAG, "setRotation:" + degrees);
    }

    public int getVideoHeight() {
        return mVideoHeight;
    }

    public int getVideoWidth() {
        return mVideoWidth;
    }

    public void startRecord() {
        mRenderer.setNeedRecord(true);
    }


    public void stopRecord() {
        mRenderer.setStopRecorder(true);
    }
}

The CameraGLSurfaceView class is the app-level GLSurfaceView. Its key work is initializing the camera (initCamera) and binding the camera to the OpenGL texture.
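As a usage example, a hypothetical Activity only needs to hand this view to setContentView and forward the lifecycle calls; startRecord()/stopRecord() would normally be wired to UI buttons. The class below is illustrative only and is not part of the project (camera/audio/storage runtime permissions omitted):

import android.app.Activity;
import android.os.Bundle;

public class RecordActivity extends Activity {
    private CameraGLSurfaceView mGLView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mGLView = new CameraGLSurfaceView(this);
        setContentView(mGLView);
        // in a real layout these would be bound to buttons:
        // mGLView.startRecord();
        // mGLView.stopRecord();
    }

    @Override
    protected void onResume() {
        super.onResume();
        mGLView.onResume();   // resumes the GL thread
    }

    @Override
    protected void onPause() {
        mGLView.onPause();    // pauses the GL thread; surfaceDestroyed() then stops the preview
        super.onPause();
    }
}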

SurfaceRenderer is the OpenGL renderer for the camera preview. Its key methods are onSurfaceCreated, which sets up the OpenGL environment and obtains the OpenGL texture, and onDrawFrame, which performs the OpenGL drawing and passes the texture data to the video encoder. The video and audio encoders are also initialized and started inside onDrawFrame; the encoders themselves are covered in detail later.

package lda.com.camerasurfacerecorder;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.opengl.Matrix;
import android.os.Environment;
import android.util.Log;

import java.io.File;

import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;

import lda.com.camerasurfacerecorder.recorder.MMuxer;
import lda.com.camerasurfacerecorder.recorder.VideoSurfaceEncoder;
import lda.com.camerasurfacerecorder.recorder.audio.AudioEncoder;

/**
 *摄像头预览opengl渲染器
 */

public class SurfaceRenderer implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener{
    private static String TAG = SurfaceRenderer.class.getSimpleName();
    private CameraGLSurfaceView mSurfaceView;//GLSurfaceView
    private SurfaceTexture mSurfaceTexture;//渲染纹理
    private int mTextureId;
    private GLDrawer2D mDrawer;//OpenGL绘制
    private float[] mSurfaceTextureMatrix = new float[16];//纹理变换矩阵
    //投影变换矩阵(注意,opengl坐标系和手机屏幕坐标系不同,为了正常显示,opengl坐标需要左乘投影变换矩阵)
    private float[] mMvpMatrix = new float[16];
    private boolean mIsNeedUpdateTexture = false;
    private boolean mIsNeedRecord = false;
    private VideoSurfaceEncoder mVideoEncoder;//视频编码器
    private boolean mIsRecordCurrFrame = true;
    private boolean mIsStopRecorder = false;
    private AudioEncoder mAudioEncoder;//音频编码器

    public SurfaceRenderer(CameraGLSurfaceView surfaceView) {
        mSurfaceView = surfaceView;
        Matrix.setIdentityM(mMvpMatrix, 0);
    }

    /**
     * Renderer
     * @param gl
     * @param config
     */
    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.v(TAG, "onSurfaceCreated:");
        // 摄像头渲染需要 OES_EGL_image_external extension
        final String extensions = GLES20.glGetString(GLES20.GL_EXTENSIONS); // API >= 8
        if (!extensions.contains("OES_EGL_image_external"))
            throw new RuntimeException("This system does not support OES_EGL_image_external.");
        // 创建纹理ID
        mTextureId = GLDrawer2D.initTextureId();
        // 创建渲染纹理
        mSurfaceTexture = new SurfaceTexture(mTextureId);
        mSurfaceTexture.setOnFrameAvailableListener(this);
        // 黄色清屏
        GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
        mDrawer = new GLDrawer2D();
        mDrawer.setMatrix(mMvpMatrix, 0);
    }

    public boolean isNeedRecord() {
        return mIsNeedRecord;
    }

    public void setNeedRecord(boolean isNeedRecord){
        mIsNeedRecord = isNeedRecord;
    }

    /**
     * Renderer
     * @param gl
     * @param width
     * @param height
     */
    @Override
    public void onSurfaceChanged(GL10 gl, int width, int height) {
        Log.v(TAG, String.format("onSurfaceChanged:(%d,%d)", width, height));
        // if either width or height is zero, initialization of this view is still in progress.
        if ((width == 0) || (height == 0)) return;
        updateViewport();
        mSurfaceView.startPreview(width, height);
    }

    /**
     * opengl绘制函数
     * @param gl
     */
    @Override
    public void onDrawFrame(GL10 gl) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        if (mIsNeedUpdateTexture) {
            mIsNeedUpdateTexture = false;
            //更新纹理(摄像头已经绑定该SurfaceTexture)
            mSurfaceTexture.updateTexImage();
            // 获取纹理变换矩阵
            mSurfaceTexture.getTransformMatrix(mSurfaceTextureMatrix);
        }
        // draw to preview screen
        if(mIsNeedRecord){
            if(mVideoEncoder == null){
                MMuxer mMuxer = new MMuxer(getSaveVideoPath());
                mVideoEncoder = new VideoSurfaceEncoder(mMuxer, mSurfaceView.getVideoWidth(), mSurfaceView.getVideoHeight());
                mAudioEncoder = new AudioEncoder(mMuxer);
                mVideoEncoder.setAllKeyFrame(true);
                mVideoEncoder.setEglAndStart(EGL14.eglGetCurrentContext(), mTextureId);
                mAudioEncoder.start();
                Log.d(TAG, "init encoder");
            }
//            Log.d(TAG, "encoderprepared=" + mEncoder.isPrepared() + " isRecordCurrFrame=" + mIsRecordCurrFrame);
            if(mVideoEncoder != null && mVideoEncoder.isPrepared() && mIsRecordCurrFrame){
                long curr = System.currentTimeMillis();
                Log.d(TAG, "======drawTime========" + (curr - mDrawTime));
                mDrawTime = curr;
                mVideoEncoder.render(mSurfaceTextureMatrix, mMvpMatrix);
            }
            mIsRecordCurrFrame = !mIsRecordCurrFrame;
            if(mIsStopRecorder){
                mVideoEncoder.eos();
                mAudioEncoder.eos();
                mIsNeedRecord = false;
                mVideoEncoder = null;
            }
        }
        mDrawer.draw(mTextureId, mSurfaceTextureMatrix);
    }

    public long mDrawTime = 0;

    public void setStopRecorder(boolean stopRecorder) {
        mIsStopRecorder = stopRecorder;
    }

    private String getSaveVideoPath() {
        File dir = new File(Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "00recorder" + File.separator);
        if(!dir.exists() || !dir.isDirectory()){
            dir.mkdirs();
        }
        File file = new File(dir, "surface.mp4");
        return file.getAbsolutePath();
    }

    public void onSurfaceDestroy(){
        Log.v(TAG, "onSurfaceDestroyed:");
        if (mDrawer != null) {
            mDrawer.release();
            mDrawer = null;
        }
        if (mSurfaceTexture != null) {
            mSurfaceTexture.release();
            mSurfaceTexture = null;
        }
        GLDrawer2D.deleteTex(mTextureId);
    }


    /**
     * OnFrameAvailableListener
     * @param surfaceTexture
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        mIsNeedUpdateTexture = true;
    }

    public SurfaceTexture getSurfaceTexture() {
        return mSurfaceTexture;
    }

    /**
     * 获取视窗,即需要从opengl的画布中截出一片区域用于显示内容
     */
    public void updateViewport(){
        final int view_width = mSurfaceView.getWidth();
        final int view_height = mSurfaceView.getHeight();
        GLES20.glViewport(0, 0, view_width, view_height);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        final double video_width = mSurfaceView.getVideoWidth();
        final double video_height = mSurfaceView.getVideoHeight();
        if (video_width == 0 || video_height == 0) return;
        Matrix.setIdentityM(mMvpMatrix, 0);
        final double view_aspect = view_width / (double)view_height;
        Log.i(TAG, String.format("view(%d,%d)%f,video(%1.0f,%1.0f)", view_width, view_height, view_aspect, video_width, video_height));
        if (mDrawer != null)
            mDrawer.setMatrix(mMvpMatrix, 0);
    }
}

GLDrawer2D is the class that sets up the OpenGL drawing environment. Its responsibilities are:

Loading the vertex and fragment shaders. Loading a single shader goes: create the shader -> attach the source -> compile. With both shaders compiled, the program is created: attach the vertex shader -> attach the fragment shader -> link the program -> check the link status -> return the result (the program handle on success, 0 on failure) -> use the program.

Drawing the texture: use the program -> activate the texture unit -> bind the texture -> draw (fragment rendering) -> unbind the texture -> unbind the program.

Creating the texture: generate a texture -> bind it as an external OES texture (the camera's format) -> set the wrap mode -> set the filtering -> return the texture ID.

If you are not yet comfortable with OpenGL, I recommend reading this article first: 【opengl渲染流程】

package lda.com.camerasurfacerecorder;

import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;

import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;

/**
 * Helper class to draw to whole view using specific texture and texture matrix
 */
public class GLDrawer2D {
    private static final String TAG = "GLDrawer2D";

    //顶点shader,画点
    private static final String vss
        = "uniform mat4 uMVPMatrix;\n"
        + "uniform mat4 uTexMatrix;\n"
        + "attribute highp vec4 aPosition;\n"
        + "attribute highp vec4 aTextureCoord;\n"
        + "varying highp vec2 vTextureCoord;\n"
        + "\n"
        + "void main() {\n"
        + " gl_Position = uMVPMatrix * aPosition;\n"
        + " vTextureCoord = (uTexMatrix * aTextureCoord).xy;\n"
        + "}\n";
    //片元shader,画面
    private static final String fss
        = "#extension GL_OES_EGL_image_external : require\n"
        + "precision mediump float;\n"
        + "uniform samplerExternalOES sTexture;\n"
        + "varying highp vec2 vTextureCoord;\n"
        + "void main() {\n"
        + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
        + "}";
    private static final float[] VERTICES = { 1.0f, 1.0f, -1.0f, 1.0f, 1.0f, -1.0f, -1.0f, -1.0f };
    private static final float[] TEXCOORD = { 1.0f, 1.0f, 0.0f, 1.0f, 1.0f, 0.0f, 0.0f, 0.0f };

    private final FloatBuffer pVertex;
    private final FloatBuffer pTexCoord;
    private int hProgram;
    int maPositionLoc;
    int maTextureCoordLoc;//纹理坐标引用
    int muMVPMatrixLoc;//投影变换矩阵引用
    int muTexMatrixLoc;//纹理引用
    //投影变换矩阵(注意,opengl坐标系和手机屏幕坐标系不同,为了正常显示,opengl坐标需要左乘投影变换矩阵)
    private final float[] mMvpMatrix = new float[16];

    private static final int FLOAT_SZ = Float.SIZE / 8;
    private static final int VERTEX_NUM = 4;
    private static final int VERTEX_SZ = VERTEX_NUM * 2;
    /**
     * Constructor
     * this should be called in GL context
     */
    public GLDrawer2D() {
        /**
         * 获取图形的顶点
         * 特别提示:由于不同平台字节顺序不同数据单元不是字节的一定要经过ByteBuffer
         * 转换,关键是要通过ByteOrder设置nativeOrder(),否则有可能会出问题
         *
         */
        pVertex = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pVertex.put(VERTICES);
        pVertex.flip();
        /**
         * 同上
         */
        pTexCoord = ByteBuffer.allocateDirect(VERTEX_SZ * FLOAT_SZ)
                .order(ByteOrder.nativeOrder()).asFloatBuffer();
        pTexCoord.put(TEXCOORD);
        pTexCoord.flip();

        hProgram = loadShader(vss, fss);
        //使用shader程序
        GLES20.glUseProgram(hProgram);
        /**
         * attribute变量是只能在vertex shader中使用的变量。(它不能在fragment shader中声明attribute变量,也不能被fragment shader中使用)
         一般用attribute变量来表示一些顶点的数据,如:顶点坐标,法线,纹理坐标,顶点颜色等。
         在application中,一般用函数glBindAttribLocation()来绑定每个attribute变量的位置,然后用函数glVertexAttribPointer()为每个attribute变量赋值。
         */
        maPositionLoc = GLES20.glGetAttribLocation(hProgram, "aPosition");
        maTextureCoordLoc = GLES20.glGetAttribLocation(hProgram, "aTextureCoord");
        /**
         * uniform变量是外部application程序传递给(vertex和fragment)shader的变量。因此它是application通过函数glUniform**()函数赋值的。在(vertex和fragment)shader程序内部,uniform变量就像是C语言里面的常量(const ),它不能被shader程序修改。(shader只能用,不能改)
         如果uniform变量在vertex和fragment两者之间声明方式完全一样,则它可以在vertex和fragment共享使用。(相当于一个被vertex和fragment shader共享的全局变量)
         uniform变量一般用来表示:变换矩阵,材质,光照参数和颜色等信息。
         */
        muMVPMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uMVPMatrix");
        muTexMatrixLoc = GLES20.glGetUniformLocation(hProgram, "uTexMatrix");

        Matrix.setIdentityM(mMvpMatrix, 0);
         /*
         * 应用投影和视口变换
         * 为当前程序对象指定Uniform变量的值
         *  location
            指明要更改的uniform变量的位置
            count
            指明要更改的矩阵个数
            transpose
            指明是否要转置矩阵,并将它作为uniform变量的值。必须为GL_FALSE。
            value
            指明一个指向count个元素的指针,用来更新指定的uniform变量。
         */
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, mMvpMatrix, 0);
        //将顶点位置数据传送进渲染管线, 为画笔指定顶点的位置坐标数据
        GLES20.glVertexAttribPointer(maPositionLoc,//顶点位置数据引用
                2, //每2个数字代表一个坐标
                GLES20.GL_FLOAT,//坐标单位为浮点类型
                false,
                VERTEX_SZ,//每组数据字节数量
                pVertex);//缓冲区
        GLES20.glVertexAttribPointer(maTextureCoordLoc, 2, GLES20.GL_FLOAT, false, VERTEX_SZ, pTexCoord);
        //将纹理数据传进渲染管线,为画笔指定纹理坐标数据
        GLES20.glEnableVertexAttribArray(maPositionLoc);
        GLES20.glEnableVertexAttribArray(maTextureCoordLoc);
    }

    /**
     * terminating, this should be called in GL context
     */
    public void release() {
        if (hProgram >= 0)
            GLES20.glDeleteProgram(hProgram);
        hProgram = -1;
    }

    /**
     * draw specific texture with specific texture matrix
     * @param tex_id texture ID
     * @param tex_matrix texture matrix; if this is null, the previous one is reused (the array size is not checked and must hold at least 16 floats)
     */
    public void draw(final int tex_id, final float[] tex_matrix) {
        GLES20.glUseProgram(hProgram);
        if (tex_matrix != null)
            GLES20.glUniformMatrix4fv(muTexMatrixLoc, 1, false, tex_matrix, 0);
        GLES20.glUniformMatrix4fv(muMVPMatrixLoc, 1, false, mMvpMatrix, 0);
        //激活纹理
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        //绑定纹理
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex_id);
        //第一个参数表示绘制方式(三角形),第二个参数表示偏移量,第三个参数表示顶点个数。
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, VERTEX_NUM);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, 0);
        GLES20.glUseProgram(0);
    }

    /**
     * Set model/view/projection transform matrix
     * @param matrix
     * @param offset
     */
    public void setMatrix(final float[] matrix, final int offset) {
        if ((matrix != null) && (matrix.length >= offset + 16)) {
            System.arraycopy(matrix, offset, mMvpMatrix, 0, 16);
        } else {
            Matrix.setIdentityM(mMvpMatrix, 0);
        }
    }
    /**
     * create external texture
     * @return texture ID
     */
    public static int initTextureId() {
        final int[] tex = new int[1];
        //创建纹理
        GLES20.glGenTextures(1, tex, 0);
        //纹理绑定的目标(target)并不是通常的GL_TEXTURE_2D,而是GL_TEXTURE_EXTERNAL_OES,这是因为Camera使用的输出texture是一种特殊的格式
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
        //纹理坐标系用S-T来表示,S为横轴,T为纵轴。
        // 参数1:纹理类型,参数2:纹理环绕方向, 参数3:纹理坐标范围(GL_CLAMP_TO_EDGE:纹理坐标到[1/2n,1-1/2n],GL_CLAMP:截取纹理坐标到 [0,1])
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
        //第二个参数指定滤波方法,其中参数值GL_TEXTURE_MAG_FILTER指定为放大滤波方法,
        // GL_TEXTURE_MIN_FILTER指定为缩小滤波方法;第三个参数说明滤波方式
        //GL_NEAREST则采用坐标最靠近象素中心的纹素,这有可能使图像走样;
        // 若选择GL_LINEAR则采用最靠近象素中心的四个象素的加权平均值。
        // GL_NEAREST所需计算比GL_LINEAR要少,因而执行得更快,但GL_LINEAR提供了比较光滑的效果。
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
        Log.v(TAG, "initTextureId:" + tex[0]);
        return tex[0];
    }

    /**
     * delete specific texture
     */
    public static void deleteTex(final int hTex) {
        Log.v(TAG, "deleteTex:");
        final int[] tex = new int[] {hTex};
        GLES20.glDeleteTextures(1, tex, 0);
    }

    /**
     * load, compile and link shader
     * @param vss source of vertex shader
     * @param fss source of fragment shader
     * @return
     */
    public static int loadShader(final String vss, final String fss) {
        Log.v(TAG, "loadShader:");
        int vs = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
        GLES20.glShaderSource(vs, vss);//加载顶点 shader
        GLES20.glCompileShader(vs);//编译shader
        final int[] compiled = new int[1];
        //获取shader的编译结果
        GLES20.glGetShaderiv(vs, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {//获取失败,删除shader并log
             Log.e(TAG, "Failed to compile vertex shader:" + GLES20.glGetShaderInfoLog(vs));
            GLES20.glDeleteShader(vs);
            vs = 0;
        }
        //创建片元着色器
        int fs = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
        GLES20.glShaderSource(fs, fss);
        GLES20.glCompileShader(fs);
        GLES20.glGetShaderiv(fs, GLES20.GL_COMPILE_STATUS, compiled, 0);
        if (compiled[0] == 0) {
             Log.w(TAG, "Failed to compile fragment shader:" + GLES20.glGetShaderInfoLog(fs));
            GLES20.glDeleteShader(fs);
            fs = 0;
        }

        //创建shader程序
        int program = GLES20.glCreateProgram();
        if(program != 0) {//创建成功
            //加入顶点着色器
            GLES20.glAttachShader(program, vs);
            //加入片元着色器
            GLES20.glAttachShader(program, fs);
            //链接程序
            GLES20.glLinkProgram(program);
            int[] linkStatus = new int[1];
            //获取链接程序结果
            GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
            // 若链接失败则报错并删除程序
            if (linkStatus[0] != GLES20.GL_TRUE) {
                Log.e(TAG, "Could not link program: ");
                Log.e(TAG, GLES20.glGetProgramInfoLog(program));
                GLES20.glDeleteProgram(program);
                program = 0;
            }
        }
        return program;
    }

}

Encoding Audio and Video

The preview part is done; next comes the encoding flow in detail. Encoding splits into two parts: video encoding and audio encoding. This involves OpenGL and EGL as well as MediaCodec, the core of the encoder. OpenGL has already been covered above and won't be repeated here; if you're not familiar with EGL, take a look at this link: egl接口解析.

Video encoder
The encoder's basic flow is captured by the following interface:

package lda.com.camerasurfacerecorder.recorder;

/**
 * Created by lda on 2017/10/11.
 */

public interface IEncoder {
    void prepare();//初始化编码器
    void input(Frame frame);//输入相机数据
    void output(boolean isEos);//输出编码后的数据
    void eos();//终止编码器
    void release();//释放编码器
}

The flow from the camera all the way to the encoded output is as follows:

Initialization:
Create the video encoder -> create the EGL environment -> configure the OpenGL environment (dedicated to EGL) -> start the EGL render thread (a loop thread) -> start the video encoder thread (a loop thread)

Video encoding:
The camera renderer passes the texture data to the EGL renderer (in SurfaceRenderer.onDrawFrame()). Inside the EGL context, OpenGL draws the texture onto the Surface belonging to the video encoder; the encoder then encodes that frame automatically and the encoded data is written to the muxer (recording is just this process repeated).

Finishing: release the EGL resources, then release the encoder.

Thread cooperation during video encoding is built on Lock and Condition:
After initialization both the EGL render thread and the encoder thread sit in await() -> the OpenGL (camera) thread passes texture data to the EGL renderer -> the EGL renderer stores the texture data -> the EGL render thread is signalled -> the EGL thread draws the texture with OpenGL -> the encoder thread is signalled and the EGL thread goes back to await() -> the encoder thread drains the encoded output -> the encoder thread goes back to await() (at which point the EGL render thread is also awaiting).
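A minimal, self-contained sketch of this await/signal hand-off is shown below; it is illustrative only, since in the project the pattern is spread across SurfaceEncoderRenderer.draw()/call() and VideoSurfaceEncoder:

import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

class FrameHandoff {
    private final Lock lock = new ReentrantLock();
    private final Condition frameReady = lock.newCondition();
    private boolean hasFrame = false;

    // producer side: called by the camera/GL thread when a new texture frame is ready
    void signalFrame() {
        lock.lock();
        try {
            hasFrame = true;
            frameReady.signal();
        } finally {
            lock.unlock();
        }
    }

    // consumer side: the EGL render (or encoder) thread blocks here until a frame arrives
    void awaitFrame() throws InterruptedException {
        lock.lock();
        try {
            while (!hasFrame) {      // loop guards against spurious wake-ups
                frameReady.await();
            }
            hasFrame = false;
        } finally {
            lock.unlock();
        }
    }
}

The project's threads call await() without such a guard flag; a spurious wake-up there only costs one redundant draw, but the flag-guarded loop above is the safer general pattern.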

Note that every OpenGL operation on the encoder side, including initialization and the drawing itself, must be performed inside the EGL context.

The general steps for drawing with EGL:
1. Obtain an EGLDisplay
2. Initialize the connection to the EGLDisplay
3. Obtain an EGLConfig
4. Create an EGLContext
5. Create an EGLSurface
6. Bind the EGLContext and the EGLSurface (makeCurrent)
7. Draw with GL commands: GLDrawer2D.draw is invoked from the encoder side
8. Detach and release the EGLContext bound to the EGLSurface
9. Destroy the EGLSurface
10. Destroy the EGLContext
11. Terminate the connection to the EGLDisplay
Steps 6 and 7 are repeated for every frame. As soon as step 7 finishes, the encoder must be told to drain its output; otherwise the next draw would overwrite the previous frame.
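For orientation, the per-frame portion (steps 6 and 7) boils down to something like the sketch below. MEgl and GLDrawer2D are the classes listed later; waitForNextTexture() stands in for the Condition hand-off described above, so this is not the project's exact code:

// Sketch of the per-frame render/encode hand-off (illustrative names)
void renderLoop(MEgl egl, GLDrawer2D drawer, int textureId, float[] texMatrix,
                VideoSurfaceEncoder encoder) throws InterruptedException {
    while (encoder.isRecording()) {
        waitForNextTexture();                        // await() until the camera thread signals a frame
        egl.makeCurrent();                           // step 6: bind context + encoder surface to this thread
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        drawer.draw(textureId, texMatrix);           // step 7: draw the camera OES texture
        egl.swapBuffers();                           // hands the finished frame to MediaCodec's input surface
        encoder.singalOutput();                      // wake the encoder thread so it drains the output now
    }
}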

Here is the encoder code:

package lda.com.camerasurfacerecorder.recorder;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.concurrent.Callable;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import lda.com.camerasurfacerecorder.Config;

/**
 * Created by lda on 2017/11/28.
 */

public abstract class MediaEncoder implements Callable, IEncoder {
    protected String TAG = "MediaEncoder";
    protected static final long TIMEOUT_USEC = 10000;
    protected MMuxer mMuxer;
    protected MediaCodec mMediaCodec;
    protected boolean mIsRecording = false;
    protected boolean mIsInit = false;
    protected MediaType mMediaType;
    protected boolean mIsEos = false;


    protected MediaCodec.BufferInfo mBufferInfo;
    protected MediaFormat mMediaFormat;
    protected int mTrackIndex;

    protected MediaEncoder(MMuxer muxer, MediaType type){
        mMuxer = muxer;
        mMediaType = type;
    }

    public boolean isRecording() {
        return mIsRecording;
    }

    public void setRecording(boolean recording) {
        mIsRecording = recording;
    }

    protected abstract boolean isEos();

    protected void init() {
        mBufferInfo = new MediaCodec.BufferInfo();
        prepare();
        mIsInit = true;
        setRecording(true);
    }

    @Override
    public void output(boolean isEos) {
        String tag = TAG + "-output";
        ByteBuffer[] outputBuffers = null;
        int count = 0;
        int outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        try{
            outputBuffers = mMediaCodec.getOutputBuffers();
            do{
                if(outputIndex == MediaCodec.INFO_TRY_AGAIN_LATER){
                    Log.i(tag, "output from encoder not available");
                    if(!isEos){
                        count++;
                        if(count >= 5){
                            Log.i(tag, "output from encoder not available and break===========");
                            break;
                        }
                    }
                }else if(outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED){
                    outputBuffers = mMediaCodec.getOutputBuffers();
                    Log.i(tag, "encoder output buffers changed");
                }else if(outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                    //在音视频混合器中加入视频/音频轨道
                    addTrack();
                    Log.i(tag, "encoder output format change");
                }else if(outputIndex < 0){
                    Log.e(tag, "output buffer wrong " + outputIndex);
                }else{
                    ByteBuffer outputBuffer = outputBuffers[outputIndex];
                    if(outputBuffer == null){
                        Log.e(tag, "output buffer null");
                        return;
                    }
                    if((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0){
                        // The codec config data was pulled out and fed to the muxer when we got
                        // the INFO_OUTPUT_FORMAT_CHANGED status.  Ignore it.
                        mBufferInfo.size = 0;
                    }
                    Log.d(tag, "buffer size=" + mBufferInfo.size + " pts=" + mBufferInfo.presentationTimeUs);
                    if(mBufferInfo.size != 0){
                        if(!mMuxer.isVideoTrackAdd()){
                            addTrack();
                        }
                        if(!mMuxer.isStarted() && mMuxer.isPrepared()){
                            mMuxer.start();
                        }
                        if(mMuxer.isStarted()){
                            outputBuffer.position(mBufferInfo.offset);
                            outputBuffer.limit(mBufferInfo.offset + mBufferInfo.size);
                            //写入指定轨道的数据
                            mMuxer.writeSampleData(mTrackIndex, outputBuffer, mBufferInfo);
                        }
                    }
                    mMediaCodec.releaseOutputBuffer(outputIndex, false);
                    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        // 停止编码器
                        Log.d(tag, "output: eos coming");
                        mIsRecording = false;
                        release();
                        break;      // out of while
                    }
                }
                outputIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);

            }while (outputIndex >= 0);
        }catch (Exception e){
        }
    }

    //加入轨道
    protected void addTrack() {
        mMediaFormat = mMediaCodec.getOutputFormat();
        mTrackIndex = mMuxer.addTrack(mMediaFormat, mMediaType);
    }

    @Override
    public void release() {
        if(!mIsRecording){
            mMuxer.eos(mTrackIndex);
            mMediaCodec.release();
        }
    }

    protected MediaCodecInfo selectCodec(String mimeType) {
        int numCodecs = MediaCodecList.getCodecCount();
        for (int i = 0; i < numCodecs; i++) {
            MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
            if (!codecInfo.isEncoder()) {
                continue;
            }
            String[] types = codecInfo.getSupportedTypes();
            for (int j = 0; j < types.length; j++) {
                if (types[j].equalsIgnoreCase(mimeType)) {
                    return codecInfo;
                }
            }
        }
        return null;
    }

    public enum MediaType{
        VIDEO,
        AUDIO
    }
}
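MediaEncoder writes its output through MMuxer, whose source is not listed in this part. As a rough sketch, such a wrapper around Android's MediaMuxer has to collect both tracks before starting and may only stop once both encoders have reported end-of-stream. The class below is an assumption reconstructed from the calls made above (the real MMuxer also takes a MediaType in addTrack and exposes isVideoTrackAdd()):

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.io.IOException;
import java.nio.ByteBuffer;

// Illustrative MediaMuxer wrapper; method names follow the calls in MediaEncoder, not the real MMuxer.
public class MMuxerSketch {
    private static final int EXPECTED_TRACKS = 2;   // one video track + one audio track
    private final MediaMuxer mMuxer;
    private int mAddedTracks = 0;
    private int mFinishedTracks = 0;
    private boolean mStarted = false;

    public MMuxerSketch(String outputPath) throws IOException {
        mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    }

    // called when an encoder reports INFO_OUTPUT_FORMAT_CHANGED
    public synchronized int addTrack(MediaFormat format) {
        mAddedTracks++;
        return mMuxer.addTrack(format);
    }

    public synchronized boolean isPrepared() { return mAddedTracks == EXPECTED_TRACKS; }
    public synchronized boolean isStarted()  { return mStarted; }

    // may only be called once, after every expected track has been added
    public synchronized void start() {
        if (!mStarted && isPrepared()) {
            mMuxer.start();
            mStarted = true;
        }
    }

    public synchronized void writeSampleData(int trackIndex, ByteBuffer buffer, MediaCodec.BufferInfo info) {
        if (mStarted) mMuxer.writeSampleData(trackIndex, buffer, info);
    }

    // each encoder calls this at end-of-stream; the file is finalized once both tracks are done
    public synchronized void eos(int trackIndex) {
        mFinishedTracks++;
        if (mStarted && mFinishedTracks == EXPECTED_TRACKS) {
            mMuxer.stop();
            mMuxer.release();
            mStarted = false;
        }
    }
}

The abstract VideoEncoder below adds the H.264-specific configuration on top of MediaEncoder, and VideoSurfaceEncoder after it supplies the Surface-based input path.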
package lda.com.camerasurfacerecorder.recorder;

import android.annotation.TargetApi;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.os.Bundle;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import lda.com.camerasurfacerecorder.Config;

/**
 * Created by lda on 2017/11/28.
 */

public abstract class VideoEncoder extends MediaEncoder {
    protected static final String MIME_TYPE = "video/avc";
    protected static final int FRAME_RATE = 30;
    protected static final int BIT_RATE = 4 * 1024 * 1024;
    protected static final int IFRAME_INTERVAL = 1;//1秒
    protected int mWidth = Config.VIDEO_WIDTH;
    protected int mHeight = Config.VIDEO_HEIGHT;
    protected Lock mLock;
    protected Condition mOutputCondition;
    protected boolean mIsAllKeyFrame = false;

    protected VideoEncoder(MMuxer muxer) {
        super(muxer, MediaType.VIDEO);
        TAG = "VideoEncoder";
    }

    @Override
    protected void init() {
        mLock = new ReentrantLock();
        mOutputCondition = mLock.newCondition();
        super.init();
    }

    @TargetApi(19)
    public void setAllKeyFrame(boolean allKeyFrame) {
        mIsAllKeyFrame = allKeyFrame;
    }

    public boolean isAllKeyFrame() {
        return mIsAllKeyFrame;
    }

    @Override
    public void input(Frame frame) {
    }

    @Override
    protected boolean isEos() {
        return mIsEos;
    }

    @Override
    public void eos() {
        mIsEos = true;
    }

    @TargetApi(19)
    protected void requestKeyFrame() {
        if (isRecording()){
            try {
                Bundle reqKeyCmd = new Bundle();
                reqKeyCmd.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
                mMediaCodec.setParameters(reqKeyCmd);
                //Log.v("meeee", " reqi");
            } catch (Exception e) {
            }
        }
    }

}
package lda.com.camerasurfacerecorder.recorder;

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.opengl.EGLContext;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;

import lda.com.camerasurfacerecorder.CameraGLSurfaceView;
import lda.com.camerasurfacerecorder.Config;
import lda.com.camerasurfacerecorder.thread.ThreadPool;

/**
 * Created by lda on 2017/11/28.
 */

public class VideoSurfaceEncoder extends VideoEncoder {
    public static final int SURFACE_COLOR_FORMAT = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;
    private Surface mSurface;
    private SurfaceEncoderRenderer mRenderer;

    private static int getSurfaceColorFormat() {
        return SURFACE_COLOR_FORMAT;
    }

    public VideoSurfaceEncoder(MMuxer muxer, int width, int height) {
        super(muxer);
        mRenderer = new SurfaceEncoderRenderer();
        mWidth = width;
        mHeight = height;
    }

    @Override
    public Object call() throws Exception {
        while (!mIsInit){
            init();
        }
        while (!mIsInit){
            return null;
        }
        while (mIsRecording){
            try {
                mLock.lock();
                if(isEos()){
                    //停止编码器
                    mMediaCodec.signalEndOfInputStream();//signalEndOfInputStream只对surface录制有效
                    Log.d(TAG, "singal eos");
                    output(true);
                    break;
                }
                mOutputCondition.await();//进入await状态
                output(false);
            }finally {
                mLock.unlock();
            }
        }
        return null;
    }

    @Override
    public void output(boolean isEos) {
        if(isAllKeyFrame()){
            requestKeyFrame();
        }
        super.output(isEos);
    }

    @Override
    public void prepare() {
        Log.i(TAG, "prepare: ");
        mTrackIndex = -1;
        final MediaCodecInfo videoCodecInfo = selectCodec(MIME_TYPE);
        if (videoCodecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);  // API >= 18
        format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);//设置码率
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);//设置帧率
        if(!mIsAllKeyFrame) {
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        }else{
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 0);//设置全关键帧
        }
        try {
            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // get Surface for encoder input
            // this method can only be called between #configure and #start
            mSurface = mMediaCodec.createInputSurface();    // API >= 18
            mMediaCodec.start();
            mRenderer.setSurface(mSurface);
            mRenderer.start();
            Log.i(TAG, "prepare finishing");
        } catch (IOException e) {
            Log.e(TAG, "" + e);
            e.printStackTrace();
        }
    }

    public boolean isPrepared() {
        return mIsInit && mRenderer.isInitGL();
    }

    public SurfaceEncoderRenderer getRenderer() {
        return mRenderer;
    }

    @Override
    public void release() {
        super.release();
        if(mSurface != null){
            mSurface.release();
        }
        if(mRenderer != null){
            mRenderer.release();
        }
    }

    public void setEglAndStart(EGLContext eglContext, int textureId){
        mRenderer.setEglContext(eglContext, textureId, this);
        ThreadPool.getInstance().run(this);
    }

    //signal此线程
    public void singalOutput() {
        try {
            mLock.lock();
            mOutputCondition.signal();
        }finally {
            mLock.unlock();
        }

    }

    //egl 绘制
    public void render(float[] surfaceTextureMatrix, float[] mvpMatrix) {
        if(isAllKeyFrame()){
            requestKeyFrame();
        }
        mRenderer.draw(surfaceTextureMatrix, mvpMatrix);
        if(isAllKeyFrame()){
            requestKeyFrame();
        }
    }
}

Here is the EGL-related code:

package lda.com.camerasurfacerecorder.recorder.egl;

import android.graphics.SurfaceTexture;
import android.opengl.EGL14;
import android.opengl.EGLConfig;
import android.opengl.EGLContext;
import android.opengl.EGLDisplay;
import android.opengl.EGLSurface;
import android.os.Build;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

/**
 * Created by lda on 2017/11/28.
 */

/**
 * EGL的绘图的一般步骤:

 1,获取EGLDisplay对象
 2,初始化与EGLDisplay 之间的连接。
 3,获取EGLConfig对象
 4,创建EGLContext 实例
 5,创建EGLSurface实例

 6,连接EGLContext和EGLSurface.
 7,使用GL指令绘制图形:GLDrawer2D.draw 在encoder中调用绘制

 8,断开并释放与EGLSurface关联的EGLContext对象
 9,删除EGLSurface对象
 10,删除EGLContext对象
 11,终止与EGLDisplay之间的连接。

 每次绘制一帧都需要6->7过程吗,7绘制完就通知编码器输出数据,否则下次绘制会把上次绘制的数据冲掉
 */
public class MEgl {
    private static final String TAG = "MEgl";
    private static final int EGL_RECORDABLE_ANDROID = 0x3142;
    private EGLDisplay mEglDisplay = EGL14.EGL_NO_DISPLAY;
    private EGLContext mEglContext = EGL14.EGL_NO_CONTEXT;
    private EGLConfig mEglConfig;
    private EGLSurface mEGLSurface;
    private EGLContext mDefaultContext = EGL14.EGL_NO_CONTEXT;

    public void init(EGLContext eglContext, final boolean isDepthBuffer, final boolean isRecordable, Object surface) {
        Log.v(TAG, "init:");
        if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
            throw new RuntimeException("EGL already set up");
        }
        //1,获取EGLDisplay对象
        mEglDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY);
        if (mEglDisplay == EGL14.EGL_NO_DISPLAY) {
            Log.e(TAG, "eglGetDisplay failed");
        }

        final int[] version = new int[2];
        // 2,初始化与EGLDisplay 之间的连接。
        if (!EGL14.eglInitialize(mEglDisplay, version, 0, version, 1)) {
            mEglDisplay = null;
            Log.e(TAG, "eglInitialize failed");
        }

        eglContext = eglContext != null ? eglContext : EGL14.EGL_NO_CONTEXT;

        if (mEglContext == EGL14.EGL_NO_CONTEXT) {
            //3,获取EGLConfig对象
            mEglConfig = getConfig(isDepthBuffer, isRecordable);
            if (mEglConfig == null) {
                Log.e(TAG, "chooseConfig failed");
            }
            //  4,创建EGLContext 实例
            mEglContext = createContext(eglContext);
        }
        // confirm whether the EGL rendering context is successfully created
        final int[] values = new int[1];
        EGL14.eglQueryContext(mEglDisplay, mEglContext, EGL14.EGL_CONTEXT_CLIENT_VERSION, values, 0);
        Log.d(TAG, "EGLContext created, client version " + values[0]);
        makeDefault();  // makeCurrent(EGL14.EGL_NO_SURFACE);

        if (!(surface instanceof SurfaceView) && !(surface instanceof Surface) && !(surface instanceof SurfaceHolder) && !(surface instanceof SurfaceTexture)) {
            Log.e(TAG, "unsupported surface");
        }
        //5,创建EGLSurface实例
        mEGLSurface = createWindowSurface(surface);
        makeCurrent();
    }

    /**
     * 6,连接EGLContext和EGLSurface.
     * @return
     */
    public boolean makeCurrent() {
//      if (DEBUG) Log.v(TAG, "makeCurrent:");
        if (mEglDisplay == null) {
            Log.d(TAG, "makeCurrent:eglDisplay not initialized");
        }
        if (mEGLSurface == null || mEGLSurface == EGL14.EGL_NO_SURFACE) {
            final int error = EGL14.eglGetError();
            if (error == EGL14.EGL_BAD_NATIVE_WINDOW) {
                Log.e(TAG, "makeCurrent:returned EGL_BAD_NATIVE_WINDOW.");
            }
            return false;
        }
        // attach EGL renderring context to specific EGL window surface
        if (!EGL14.eglMakeCurrent(mEglDisplay, mEGLSurface, mEGLSurface, mEglContext)) {
            Log.w(TAG, "eglMakeCurrent:" + EGL14.eglGetError());
            return false;
        }
        return true;
    }

    private EGLSurface createWindowSurface(final Object nativeWindow) {
        Log.v(TAG, "createWindowSurface:nativeWindow=" + nativeWindow);
        final int[] surfaceAttribs = {EGL14.EGL_NONE};
        EGLSurface result = null;
        try {
            result = EGL14.eglCreateWindowSurface(mEglDisplay, mEglConfig, nativeWindow, surfaceAttribs, 0);
        } catch (final IllegalArgumentException e) {
            Log.e(TAG, "eglCreateWindowSurface", e);
        }
        return result;
    }

    //绘制完毕后使用eglSwapBuffers()交换前后缓冲,用户即看到在后缓冲中的内容
    public int swapBuffers() {
        if (!EGL14.eglSwapBuffers(mEglDisplay, mEGLSurface)) {
            final int err = EGL14.eglGetError();
            Log.w(TAG, "swap:err=" + err);
            return err;
        }
        return EGL14.EGL_SUCCESS;
    }

    private void makeDefault() {
        Log.v(TAG, "makeDefault:");
        if (!EGL14.eglMakeCurrent(mEglDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_CONTEXT)) {
            Log.w("TAG", "makeDefault" + EGL14.eglGetError());
        }
    }

    private EGLContext createContext(final EGLContext shared_context) {
//      if (DEBUG) Log.v(TAG, "createContext:");

        final int[] attrib_list = {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL14.EGL_NONE
        };
        final EGLContext context = EGL14.eglCreateContext(mEglDisplay, mEglConfig, shared_context, attrib_list, 0);
        checkEglError("eglCreateContext");
        return context;
    }

    private EGLConfig getConfig(final boolean with_depth_buffer, final boolean isRecordable) {
        final int[] attribList = {
                EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,
                EGL14.EGL_RED_SIZE, 8,//指定RGB中R的大小
                EGL14.EGL_GREEN_SIZE, 8,//指定G大小
                EGL14.EGL_BLUE_SIZE, 8,//指定B大小
                EGL14.EGL_ALPHA_SIZE, 8,//指定Alpha大小
                EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL14.EGL_STENCIL_SIZE, 8,
                EGL14.EGL_NONE, EGL14.EGL_NONE, //EGL_RECORDABLE_ANDROID, 1,    // this flag need to recording of MediaCodec
                EGL14.EGL_NONE, EGL14.EGL_NONE, //  with_depth_buffer ? EGL14.EGL_DEPTH_SIZE : EGL14.EGL_NONE,
                // with_depth_buffer ? 16 : 0,
                EGL14.EGL_NONE
        };
        int offset = 10;
        if (false) {
            attribList[offset++] = EGL14.EGL_STENCIL_SIZE;
            attribList[offset++] = 8;
        }
        if (with_depth_buffer) {
            attribList[offset++] = EGL14.EGL_DEPTH_SIZE;
            attribList[offset++] = 16;
        }
        if (isRecordable && (Build.VERSION.SDK_INT >= 18)) {
            attribList[offset++] = EGL_RECORDABLE_ANDROID;
            attribList[offset++] = 1;
        }
        for (int i = attribList.length - 1; i >= offset; i--) {
            attribList[i] = EGL14.EGL_NONE;
        }
        final EGLConfig[] configs = new EGLConfig[1];
        final int[] numConfigs = new int[1];
        if (!EGL14.eglChooseConfig(mEglDisplay, attribList, 0, configs, 0, configs.length, numConfigs, 0)) {
            // XXX it will be better to fallback to RGB565
            Log.w(TAG, "unable to find RGBA8888 / " + " EGLConfig");
            return null;
        }
        return configs[0];
    }

    private void checkEglError(final String msg) {
        int error;
        if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) {
            throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error));
        }
    }

    public void release() {
        Log.v(TAG, "release:");
        if (mEglDisplay != EGL14.EGL_NO_DISPLAY) {
            destroyContext();
            EGL14.eglTerminate(mEglDisplay);
            EGL14.eglReleaseThread();
        }
        mEglDisplay = EGL14.EGL_NO_DISPLAY;
        mEglContext = EGL14.EGL_NO_CONTEXT;
    }

    private void destroyContext() {
        Log.v(TAG, "destroyContext:");

        if (!EGL14.eglDestroyContext(mEglDisplay, mEglContext)) {
            Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mEglContext);
            Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
        }
        mEglContext = EGL14.EGL_NO_CONTEXT;
        if (mDefaultContext != EGL14.EGL_NO_CONTEXT) {
            if (!EGL14.eglDestroyContext(mEglDisplay, mDefaultContext)) {
                Log.e("destroyContext", "display:" + mEglDisplay + " context: " + mDefaultContext);
                Log.e(TAG, "eglDestroyContex:" + EGL14.eglGetError());
            }
            mDefaultContext = EGL14.EGL_NO_CONTEXT;
        }
    }
}

Here is the EGL render thread:

package lda.com.camerasurfacerecorder.recorder;

import android.graphics.SurfaceTexture;
import android.opengl.EGLContext;
import android.opengl.GLES20;
import android.opengl.Matrix;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

import java.util.concurrent.Callable;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;

import lda.com.camerasurfacerecorder.CameraGLSurfaceView;
import lda.com.camerasurfacerecorder.GLDrawer2D;
import lda.com.camerasurfacerecorder.recorder.egl.MEgl;
import lda.com.camerasurfacerecorder.thread.ThreadPool;

/**
 * Created by lda on 2017/11/28.
 */

public class SurfaceEncoderRenderer implements Callable {
    private String TAG = SurfaceEncoderRenderer.class.getSimpleName();
    private Condition mDrawCondition;
    private Lock mLock;
    private EGLContext mEglContext;
    private int mTextureId;
    private float[] mMatrix = new float[32];
    private Object mSurface;
    private VideoSurfaceEncoder mEncoder;
    private MEgl mEgl;
    private GLDrawer2D mDrawer;
    private boolean mIsInitGL = false;
    private boolean mHadSetEglContext = false;

    public SurfaceEncoderRenderer() {
        mLock = new ReentrantLock();
        mDrawCondition = mLock.newCondition();
    }

    @Override
    public Object call() throws Exception {
        Log.i(TAG, "encoder render call-------");
        while (!mIsInitGL){
            initGL();
        }
        while (!mIsInitGL){
            return null;
        }
        while (mEncoder.isRecording()){
            mLock.lock();
            try {
                Log.d(TAG, "await~~~~");
                mDrawCondition.await();
                mEgl.makeCurrent();
                //makeCurrent表明opengl的操作是在egl环境下
                // clear screen with yellow color so that you can see rendering rectangle
                GLES20.glClearColor(1.0f, 1.0f, 0.0f, 1.0f);
                GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
                mDrawer.setMatrix(mMatrix, 16);
                mDrawer.draw(mTextureId, mMatrix);
                mEgl.swapBuffers();
                mEncoder.singalOutput();//通知编码器线程要输出数据啦
                Log.d(TAG, "draw------------textureId=" + mTextureId);
            }finally {
                mLock.unlock();
            }

        }
        Log.d(TAG, "call: recording finish");
        return null;
    }

    //初始化egl及其opengl
    private void initGL() {
        mEgl = new MEgl();
        mEgl.init(mEglContext, false, true, mSurface);
        mEgl.makeCurrent();//drawer必须要在egl.makeCurrent()后初始化,才能保证mDrawer渲染的是egl对应的surface
        mDrawer = new GLDrawer2D();
        mIsInitGL = true;
        Log.d(TAG, "-----init egl opengl -------------");
    }

    public boolean isInitGL() {
        return mIsInitGL;
    }

    private void releaseGL() {
        Log.i(TAG, "internalRelease:");
        if (mDrawer != null) {
            mDrawer.release();
            mDrawer = null;
        }
        if (mEgl != null) {
            mEgl.release();
            mEgl = null;
        }
    }

    public void release(){
        releaseGL();
    }

    public final void setEglContext(final EGLContext eglContext, final int textureId, VideoSurfaceEncoder encoder) {
        mEncoder = encoder;
        mEglContext = eglContext;
        mTextureId = textureId;
        Matrix.setIdentityM(mMatrix, 0);
        Matrix.setIdentityM(mMatrix, 16);
        Log.i(TAG, "setEglContext--------------");
    }

    public final void draw(final float[] textureMatrix, final float[] mvpMatrix) {
        try {
            mLock.lock();
            if ((textureMatrix != null) && (textureMatrix.length >= 16)) {
                System.arraycopy(textureMatrix, 0, mMatrix, 0, 16);
            } else {
                Matrix.setIdentityM(mMatrix, 0);
            }
            if ((mvpMatrix != null) && (mvpMatrix.length >= 16)) {
                System.arraycopy(mvpMatrix, 0, mMatrix, 16, 16);
            } else {
                Matrix.setIdentityM(mMatrix, 16);
            }
            Log.d(TAG, "signal~~~~~");
            mDrawCondition.signal();//通知绘制
        }finally {
            mLock.unlock();
        }

    }

    public void setSurface(Surface surface) {
        mSurface = surface;
    }

    public void start() {
        if (!(mSurface instanceof SurfaceView) && !(mSurface instanceof Surface) && !(mSurface instanceof SurfaceHolder) && !(mSurface instanceof SurfaceTexture)) {
            Log.e(TAG, "unsupported surface");
        }else{
            ThreadPool.getInstance().run(this);
        }
    }
}

Audio Recording

Having read the MediaEncoder class above, audio recording becomes simple, because the audio encoder also extends MediaEncoder.

Audio recording flow:

Initialize the audio encoder (not a loop thread) -> start the audio encoder -> start the audio capture thread (a loop thread) -> the capture thread feeds PCM data to the audio encoder -> the encoder outputs the encoded data -> the audio is written into the muxer.
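For orientation, the capture side boils down to the loop sketched below (simplified and illustrative; the real AudioThread listed further down also tries several audio sources, computes the buffer size from AudioRecord.getMinBufferSize(), and assumes the RECORD_AUDIO permission has been granted):

// Illustrative capture loop: read PCM from AudioRecord, hand it to the AudioEncoder
AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC, 44100,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
        AudioRecord.getMinBufferSize(44100, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT));
record.startRecording();
ByteBuffer buf = ByteBuffer.allocateDirect(1024);       // matches SAMPLES_PER_FRAME in AudioThread below
while (audioEncoder.isRecording()) {
    buf.clear();
    int readBytes = record.read(buf, 1024);              // blocking read from the mic
    if (readBytes > 0) {
        buf.position(readBytes);
        buf.flip();
        audioEncoder.encodeAudioBuffer(buf, readBytes);  // queued into MediaCodec, output drained inside
    }
}
record.stop();
record.release();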

Here is the audio encoder code:

package lda.com.camerasurfacerecorder.recorder.audio;

import android.media.AudioFormat;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

import lda.com.camerasurfacerecorder.recorder.Frame;
import lda.com.camerasurfacerecorder.recorder.MMuxer;
import lda.com.camerasurfacerecorder.recorder.MediaEncoder;
import lda.com.camerasurfacerecorder.thread.ThreadPool;

/**
 * Created by lda on 2017/12/1.
 */

public class AudioEncoder extends MediaEncoder {

    private static final String MIME_TYPE = "audio/mp4a-latm";
    private static final int SAMPLE_RATE = 44100;   // 44.1[KHz] is only setting guaranteed to be available on all devices.
    private static final int BIT_RATE = 64000;
    private AudioThread mAudioThread;

    public AudioEncoder(MMuxer muxer) {
        super(muxer, MediaType.AUDIO);
        TAG = "AudioEncoder";
    }

    @Override
    protected boolean isEos() {
        return mIsEos;
    }

    @Override
    protected void init() {
        super.init();

        ThreadPool.getInstance().run(mAudioThread);
    }

    @Override
    public Object call() throws Exception {
        init();
        return null;

    }

    @Override
    public void prepare() {
        Log.v(TAG, "audio encoder prepare:");
        mTrackIndex = -1;
        // prepare MediaCodec for AAC encoding of audio data from internal mic.
        final MediaCodecInfo audioCodecInfo = selectCodec(MIME_TYPE);
        if (audioCodecInfo == null) {
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return;
        }
        Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

        final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
        audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
        audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
        audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//      audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//      audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
        Log.i(TAG, "format: " + audioFormat);
        try {
            mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
            mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mMediaCodec.start();
            mAudioThread = new AudioThread(this);
            Log.i(TAG, "audio prepare finishing");
        } catch (IOException e) {
            Log.e(TAG, "audio prepare fail " + e);
            e.printStackTrace();
        }

    }

    @Override
    public void input(Frame frame) {

    }

    @Override
    public void eos() {
        mIsEos = true;
//        encodeAudioBuffer(null, 0, true);
    }

    /**
     * 音频采集得到的数据传进来编码
     * @param buffer
     * @param length
     */
    public void encodeAudioBuffer(ByteBuffer buffer, int length) {
        if(isEos()){
            encodeAudioBuffer(null, 0, true);
            return;
        }else{
            encodeAudioBuffer(buffer, length, false);
        }
    }

    /**
     * 编码数据
     * @param buffer
     * @param length
     * @param isEos
     */
    public void encodeAudioBuffer(ByteBuffer buffer, int length, boolean isEos) {
        if (!isRecording()) {
            return;
        }
        final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
        while (isRecording()) {
            final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufferIndex >= 0) {
                final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                inputBuffer.clear();
                if (buffer != null) {
                    inputBuffer.put(buffer);
                }
                if (isEos) {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0, getPTS(), MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                } else {
                    mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length, getPTS(), 0);
                }
                output(isEos);
                break;
            } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // wait for MediaCodec encoder is ready to encode
                // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
                // will wait for maximum TIMEOUT_USEC(10msec) on each call
            }
        }
    }

    private long getPTS() {
        return System.nanoTime() / 1000;
    }

    public void start() {
        ThreadPool.getInstance().run(this);
    }
}
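
One detail worth flagging in the AudioEncoder above: getPTS() stamps every audio frame with System.nanoTime() / 1000, i.e. with the wall-clock moment the buffer happens to be queued rather than with how much audio has actually been captured, which can introduce timestamp jitter between frames. If that ever becomes a problem, a common alternative is to derive the presentation time from the number of PCM samples already fed to the encoder. A minimal sketch, with field names of my own choosing (not from the project):

// Sketch: sample-count-based PTS for 16-bit mono PCM at 44100 Hz (field names are illustrative).
private long mTotalSamples = 0;                            // samples handed to the encoder so far
private static final int SAMPLE_RATE = 44100;

private long getSampleBasedPTS(int bytesJustQueued) {
    long ptsUs = mTotalSamples * 1_000_000L / SAMPLE_RATE; // presentation time in microseconds
    mTotalSamples += bytesJustQueued / 2;                  // 2 bytes per 16-bit mono sample
    return ptsUs;
}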

Audio capture thread:

package lda.com.camerasurfacerecorder.recorder.audio;

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;
import android.util.Log;

import java.nio.ByteBuffer;
import java.util.concurrent.Callable;

/**
 * Created by lda on 2017/12/1.
 *
 * Audio capture thread.
 */

public class AudioThread implements Callable<Object> {
    public static final String TAG = "AudioThread";
    public static final int SAMPLES_PER_FRAME = 1024;   // AAC, bytes/frame/channel
    public static final int FRAMES_PER_BUFFER = 25;     // AAC, frame/buffer/sec
    private static final int SAMPLE_RATE = 44100;   // 44.1 kHz is the only sample rate guaranteed to be available on all devices.
    private static final int[] AUDIO_SOURCES = new int[] {
            MediaRecorder.AudioSource.MIC,
            MediaRecorder.AudioSource.DEFAULT,
            MediaRecorder.AudioSource.CAMCORDER,
            MediaRecorder.AudioSource.VOICE_COMMUNICATION,
            MediaRecorder.AudioSource.VOICE_RECOGNITION,
    };
    private AudioEncoder mAudioEncoder;

    public AudioThread(AudioEncoder audioEncoder) {
        mAudioEncoder = audioEncoder;
    }

    @Override
    public Object call() throws Exception {
        android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_URGENT_AUDIO);
        try {
            final int min_buffer_size = AudioRecord.getMinBufferSize(
                    SAMPLE_RATE, AudioFormat.CHANNEL_IN_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
            int buffer_size = SAMPLES_PER_FRAME * FRAMES_PER_BUFFER;
            if (buffer_size < min_buffer_size)
                buffer_size = ((min_buffer_size / SAMPLES_PER_FRAME) + 1) * SAMPLES_PER_FRAME * 2;

            AudioRecord audioRecord = null;
            for (final int source : AUDIO_SOURCES) {
                try {
                    audioRecord = new AudioRecord(
                            source, SAMPLE_RATE,
                            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, buffer_size);
                    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
                        audioRecord = null;
                } catch (final Exception e) {
                    audioRecord = null;
                }
                if (audioRecord != null) break;
            }
            if (audioRecord != null) {
                try {
                    if (mAudioEncoder.isRecording()) {
                        Log.v(TAG, "AudioThread:start audio recording");
                        final ByteBuffer buf = ByteBuffer.allocateDirect(SAMPLES_PER_FRAME);
                        int readBytes;
                        audioRecord.startRecording();
                        try {
                            while (mAudioEncoder.isRecording()) {
                                // read audio data from internal mic
                                buf.clear();
                                readBytes = audioRecord.read(buf, SAMPLES_PER_FRAME);
                                if (readBytes > 0) {
                                    // set audio data to encoder
                                    buf.position(readBytes);
                                    buf.flip();
                                    // hand the captured data to the encoder
                                    mAudioEncoder.encodeAudioBuffer(buf, readBytes);
                                }
                            }
                        } finally {
                            audioRecord.stop();
                        }
                    }
                } finally {
                    audioRecord.release();
                }
            } else {
                Log.e(TAG, "failed to initialize AudioRecord");
            }
        } catch (final Exception e) {
            Log.e(TAG, "AudioThread#run", e);
        }
        Log.v(TAG, "AudioThread:finished");
        return null;
    }
}

Audio/video muxing

With the steps above, both the audio and the video data are being encoded, so the next step is to combine them into a single video file. From the code's point of view a video file simply has two tracks, one for video data and one for audio data; all the muxer (MediaMuxer) has to do is write each encoded stream into its corresponding track.

The muxing flow has three steps:

1. Initialization

Initialization means adding the output format (MediaFormat) of the audio encoder and of the video encoder to the muxer; the muxer assigns a track index for each. Only once BOTH tracks have been added is the muxer considered initialized and ready to start.

A crucial detail here: the MediaFormat to add is NOT the one you configure in prepare(). It must be the MediaFormat returned by MediaCodec.getOutputFormat() when the codec reports its first output (the INFO_OUTPUT_FORMAT_CHANGED result); only that is the actual encoding format. It usually differs slightly from the configured one (for example, you may request a 4 Mbps bit rate and get roughly 3.9 Mbps in the output file), because encoder implementations vary across devices and ROMs, but the difference is small. A simplified sketch of this drain step follows this list.

2. Writing data
The audio and video encoders keep feeding their encoded output to MediaMuxer, which writes it into the file.

3. Finishing
Only after BOTH the audio track and the video track have delivered their end of stream is the muxing complete, and only then may the resources be released.
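
To make the format handling in step 1 concrete, here is a simplified sketch of one pass over an encoder's output queue, showing where the muxer gets its MediaFormat. It is not the project's exact code (the encoders presumably do this in their shared output() logic); the method and parameter names are illustrative, while MMuxer and MediaEncoder.MediaType refer to the classes shown in this article:

import android.media.MediaCodec;
import android.media.MediaFormat;

import java.nio.ByteBuffer;

// Sketch only: drain one output buffer from an encoder and feed the muxer.
static int drainOnce(MediaCodec codec, MMuxer muxer, MediaEncoder.MediaType type,
                     int trackIndex, long timeoutUsec) {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outIndex = codec.dequeueOutputBuffer(info, timeoutUsec);
    if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Use the format the codec reports here, NOT the one passed to configure().
        MediaFormat actualFormat = codec.getOutputFormat();
        trackIndex = muxer.addTrack(actualFormat, type);
        if (muxer.isPrepared()) {
            muxer.start(); // start only after BOTH tracks have been added
        }
    } else if (outIndex >= 0) {
        ByteBuffer encodedData = codec.getOutputBuffers()[outIndex];
        if (info.size > 0 && muxer.isStarted()) {
            muxer.writeSampleData(trackIndex, encodedData, info);
        }
        codec.releaseOutputBuffer(outIndex, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            muxer.eos(trackIndex); // the muxer releases itself once both tracks reach EOS
        }
    }
    return trackIndex;
}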

Below is the muxer code:

package lda.com.camerasurfacerecorder.recorder;

import android.media.MediaCodec;

import java.nio.ByteBuffer;

/**
 * Created by lda on 2017/10/11.
 */

public interface IMuxer {
    void stop();
    void release();
    void writeSampleData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo);
    void start();
}
package lda.com.camerasurfacerecorder.recorder;

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.util.Log;

import java.io.IOException;
import java.nio.ByteBuffer;

/**
 * Created by lda on 2017/10/23.
 */

public class MMuxer implements IMuxer {

    String TAG = MMuxer.class.getSimpleName();
    private MediaMuxer mMuxer;
    private int mVideoIndex = -1;
    private boolean mIsStarted = false;
    private boolean mIsVideoTrackAdd = false;
    private int mTrackCount = 0;
    private boolean mIsAudioTrackAdd = false;
    private int mAudioIndex = -1;
    private boolean mVideoEos = false;
    private boolean mAudioEos = false;

    public MMuxer(String path){
        try {
            mMuxer = new MediaMuxer(path, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        } catch (IOException e) {
            Log.i(TAG, "init mmuxer error " + e);
        }
    }

    /**
     * Add a media track to the muxer.
     * @param format
     * @param type
     * @return
     */
    public int addTrack(MediaFormat format, MediaEncoder.MediaType type){
        mTrackCount++;
        if(type == MediaEncoder.MediaType.VIDEO){
            mVideoIndex = mMuxer.addTrack(format);
            mIsVideoTrackAdd = true;
            Log.d(TAG, "addTrack: video====");
            return mVideoIndex;
        }else{
            mAudioIndex = mMuxer.addTrack(format);
            mIsAudioTrackAdd = true;
            Log.d(TAG, "addTrack: audio =====");
            return mAudioIndex;
        }
    }

    public boolean isVideoTrackAdd(){
        return mIsVideoTrackAdd;
    }

    @Override
    public void stop() {
        if (mIsStarted) {
            mIsStarted = false;
            mMuxer.stop();
            Log.d(TAG, "muxer stop---");
        }
    }

    @Override
    public void release() {
        stop();
        mMuxer.release();
        Log.d(TAG, "muxer release---");

    }

    /**
     * Write encoded sample data to the corresponding track.
     * @param trackIndex
     * @param byteBuf
     * @param bufferInfo
     */
    @Override
    public void writeSampleData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
        try {
            if (mTrackCount > 0 && mIsStarted) {
                mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
                Log.d(TAG, "writeSampleData-" + trackIndex + " pts=" + bufferInfo.presentationTimeUs);
            }
        } catch (Exception e) {
            Log.e(TAG, "writeSampleData Error=" + e);
        }
    }

    @Override
    public void start() {
        mIsStarted = true;
        mMuxer.start();
        Log.i(TAG, "start_muxer");
    }

    public boolean isStarted(){
        return mIsStarted;
    }

    public boolean isPrepared() {
        return mIsAudioTrackAdd && mIsVideoTrackAdd;
    }

    /**
     * EOS handling is critical: the muxer must only be released once BOTH the audio
     * and the video track have finished.
     * @param trackIndex
     */
    public void eos(int trackIndex) {
        if(trackIndex == mVideoIndex){
            mVideoEos = true;
            Log.d(TAG, "eos video in muxer");
        }
        if(trackIndex == mAudioIndex){
            mAudioEos = true;
            Log.d(TAG, "eos audio in muxer");
        }
        if(mAudioEos && mVideoEos){
            release();
        }
    }
}

Other related code

Configuration:

package lda.com.camerasurfacerecorder;

import android.os.Environment;

import java.io.File;

/**
 * Created by lda on 2017/10/11.
 */

public class Config {
    public static final int VIDEO_WIDTH = 720;//video width
    public static final int VIDEO_HEIGHT = 1280;//video height
    public static String getSaveDir(){//output directory
        String path = Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator + "00recorder" + File.separator;
        File f = new File(path);
        if(!f.exists() || !f.isDirectory()){
            f.mkdirs();
        }
        return path;
    }

    public static String getSavePath(){
        return getSaveDir() + "aa.mp4";
    }
}

Main activity:

package lda.com.camerasurfacerecorder;

import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.widget.Button;

public class MainActivity extends AppCompatActivity {

    private CameraGLSurfaceView mCameraSurfaceView;
    private Button mRecordCtrlBtn;
    private boolean mIsRecording = false;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        mCameraSurfaceView = (CameraGLSurfaceView)findViewById(R.id.camera_surfaceview);
        mRecordCtrlBtn = (Button)findViewById(R.id.record_ctrl_btn);
        mRecordCtrlBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if(mIsRecording){
                    Log.i("MainActivity", "stop recording");
                    mCameraSurfaceView.stopRecord();
                    mRecordCtrlBtn.setText("开始录制");
                }else{
                    Log.i("MainActivity", "start recording");
                    mCameraSurfaceView.startRecord();
                    mRecordCtrlBtn.setText("停止录制");
                }
                mIsRecording = !mIsRecording;
            }
        });
    }

    @Override
    protected void onResume() {
        super.onResume();
    }

    @Override
    protected void onPause() {
        mCameraSurfaceView.stopPreview();
        super.onPause();
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
    }

}

Layout file:

<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout
    xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context="lda.com.camerasurfacerecorder.MainActivity">

    <lda.com.camerasurfacerecorder.CameraGLSurfaceView
        android:id="@+id/camera_surfaceview"
        android:layout_centerInParent="true"
        android:layout_width="match_parent"
        android:layout_height="match_parent"/>

    <Button
        android:id="@+id/record_ctrl_btn"
        android:text="开始录制"
        android:layout_centerHorizontal="true"
        android:layout_alignParentBottom="true"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"/>
</RelativeLayout>

AndroidManifest.xml:

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
          package="lda.com.camerasurfacerecorder">

    <uses-permission android:name="android.permission.RECORD_AUDIO"/>
    <uses-permission android:name="android.permission.CAMERA"/>
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"/>

    <uses-feature android:glEsVersion="0x00020000" android:required="true" />
    <application
        android:allowBackup="true"
        android:icon="@mipmap/ic_launcher"
        android:label="@string/app_name"
        android:roundIcon="@mipmap/ic_launcher_round"
        android:supportsRtl="true"
        android:theme="@style/AppTheme">
        <activity android:name=".MainActivity"
            android:screenOrientation="portrait">
            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>

                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>
    </application>

</manifest>
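
One caveat about the permissions declared above: on Android 6.0 (API 23) and higher, CAMERA, RECORD_AUDIO and WRITE_EXTERNAL_STORAGE are dangerous permissions that must also be granted at runtime, and the MainActivity shown earlier does not request them. A minimal sketch of such a request, to be called from onCreate() before starting preview or recording (the request code and method name are my own choices, not part of the project):

import android.content.pm.PackageManager;
import android.support.v4.app.ActivityCompat;
import android.support.v4.content.ContextCompat;

// Sketch: runtime permission request for the recording permissions (not in the original project).
private static final int REQ_PERMISSIONS = 1;
private static final String[] REQUIRED_PERMISSIONS = {
        android.Manifest.permission.CAMERA,
        android.Manifest.permission.RECORD_AUDIO,
        android.Manifest.permission.WRITE_EXTERNAL_STORAGE
};

private void requestPermissionsIfNeeded() {
    for (String permission : REQUIRED_PERMISSIONS) {
        if (ContextCompat.checkSelfPermission(this, permission)
                != PackageManager.PERMISSION_GRANTED) {
            // Ask for everything at once; handle the answer in onRequestPermissionsResult().
            ActivityCompat.requestPermissions(this, REQUIRED_PERMISSIONS, REQ_PERMISSIONS);
            return;
        }
    }
}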

That wraps up Surface-based video recording on Android. There is also a companion article on buffer-based recording; if you are interested in that approach, see Android音视频录制(2)——buffer录制.

If you are interested in muxing audio/video data that does not come from a camera, see Android 音视频混合.

If you are interested in audio trimming, see these two articles:
Android Mp3音频裁剪

Android Wav音频裁剪