1. 程式人生 > >Android Camera2 Opengles2.0 影象實時濾鏡 顯示 視訊編碼

Android Camera2 Opengles2.0 影象實時濾鏡 顯示 視訊編碼

在博文”Android Camera2 Opengles2.0 預覽影象實時濾鏡 視訊編碼”
http://blog.csdn.net/keen_zuxwang/article/details/78366598
的基礎上新增FBO實時濾鏡、回撥顯示—其中用到glReadPixels:
glReadPixels實際上是從緩衝區中讀取資料,如果使用了雙緩衝區,
則預設是從正在顯示的緩衝(即前緩衝)中讀取,而繪製工作是預設繪製到後緩衝區的。因此,如果需要讀取已經繪製好的畫素,往往需要先交換前後緩衝
void glReadPixels( GLint x,
GLint y,
GLsizei width,
GLsizei height,
GLenum format,
GLenum type,
GLvoid * data)

vertex
shader, fragment shader部分不變

增加, FBO 操作類:

public class EasyGlUtils {
    EasyGlUtils(){

    }

    public static void defaultTexParameter(){
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL
_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE); } public static void useTexParameter(int gl_wrap_s, int gl_wrap_t, int gl_min_filter, int gl_mag_filter){ GLES20.glTexParameterf
(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,gl_wrap_s); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,gl_wrap_t); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,gl_min_filter); GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER,gl_mag_filter); } //生產紋理、並設定紋理型別、尺寸等引數,呼叫GLES20.glDrawElements() GLES20.glDrawArrays()將片元繪製到該設定的紋理上 public static void genTexturesWithParameter(int size, int[] textures,int start, int gl_format,int width,int height){ GLES20.glGenTextures(size, textures, start); for (int i = 0; i < size; i++) { GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textures[i]); GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, gl_format, width, height, 0, gl_format, GLES20.GL_UNSIGNED_BYTE, null); defaultTexParameter(); } GLES20.glBindTexture(GLES20.GL_TEXTURE_2D,0); } public static void generateBindFrameTexture(int[] frameBufferId, int[] renderId, int[] textureId, int width, int height){ //生成fb GLES20.glGenFramebuffers(1, frameBufferId, 0); GLES20.glGenRenderbuffers(1, renderId, 0); genTexturesWithParameter(1, textureId, 0, GLES20.GL_RGBA, width, height); //繫結fb GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId[0]); GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, renderId[0]); GLES20.glRenderbufferStorage(GLES20.GL_RENDERBUFFER, GLES20.GL_DEPTH_COMPONENT16, width, height); //繫結紋理到fb GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId[0], 0); GLES20.glFramebufferRenderbuffer(GLES20.GL_FRAMEBUFFER, GLES20.GL_DEPTH_ATTACHMENT, GLES20.GL_RENDERBUFFER, renderId[0]); } //繫結Framebuffer Texture2D public static void bindFrameTexture(int frameBufferId, int textureId){ GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, frameBufferId); GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, textureId, 0); } public static void unBindFrameBuffer(){ 
//GLES20.glBindRenderbuffer(GLES20.GL_RENDERBUFFER, 0); GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER,0); } }

shader 操作類增加FBO離屏渲染、顯示部分:

public  class SurfaceRenderer implements Runnable, SurfaceTexture.OnFrameAvailableListener{
    public static String LOG_TAG = SurfaceRenderer.class.getSimpleName();
    private static float squareCoords[] = {
            -1.0f,  1.0f,  // top left
            -1.0f, -1.0f,  // bottom left
             1.0f, -1.0f,  // bottom right
             1.0f,  1.0f   // top right
    };
    private static short drawOrder[] = { 0, 1, 2, 0, 2, 3};
    // Texture to be shown in background
    private float textureCoords[] = {
            0.0f, 1.0f, 0.0f, 1.0f,
            0.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 0.0f, 0.0f, 1.0f,
            1.0f, 1.0f, 0.0f, 1.0f 
    };

    private int[] textures = new int[1];
    private Context context;
    private int shaderProgram;
    private FloatBuffer vertexBuffer;
    private FloatBuffer textureBuffer;
    private ShortBuffer drawListBuffer;

    private SurfaceTexture videoTexture;
    private float[] videoTextureTransform;
    private boolean frameAvailable = false;

    int textureParamHandle;
    int textureCoordinateHandle;
    int positionHandle;
    int textureTranformHandle;

    protected  Surface surface;
    protected int width;
    protected int height;

    private EGL10 egl;
    private EGLContext eglContext;
    private EGLDisplay eglDisplay;
    private EGLSurface eglSurface;

    TextureViewMediaActivity instance;

    public boolean running = false;
    private float[] frameMatrix=new float[16]; //用於繪製回撥縮放的矩陣

    private boolean isRecord=false;                            
    private boolean isShoot=false;                              
    private ByteBuffer[] outPutBuffer = new ByteBuffer[3]; //用於儲存回撥資料的buffer
    private OnFrameCallback onFrameCallback;  //回撥
    private int frameCallbackWidth, frameCallbackHeight; //回撥資料的寬高
    private int indexOutput=0;  

    /** Receives each captured frame as raw RGBA bytes plus a timestamp. */
    public interface OnFrameCallback {
        void onFrame(byte[] bytes, long time);
    }

    /**
     * Registers a callback that receives each rendered frame as RGBA bytes at
     * {@code width x height}. Passing a non-positive size clears the callback
     * and stops frame capture.
     *
     * @param width  callback frame width in pixels
     * @param height callback frame height in pixels
     * @param onFrameCallback receiver of the RGBA frame data
     */
    public void setOnFrameCallback(int width, int height, OnFrameCallback onFrameCallback){
        this.frameCallbackWidth = width;
        this.frameCallbackHeight = height;
        if (frameCallbackWidth > 0 && frameCallbackHeight > 0) {
            // Multi-buffer the output so a consumer can hold one frame while others fill.
            for (int i = 0; i < outPutBuffer.length; i++) {
                outPutBuffer[i] = ByteBuffer.allocate(width * height * 4); // RGBA = 4 bytes/pixel
            }
            setFrameCallbackMatrix();
            this.onFrameCallback = onFrameCallback;
            isRecord = true;
        } else {
            this.onFrameCallback = null;
            // BUG FIX: also stop capture; the original left isRecord == true after
            // the callback was cleared, so callbackIfNeeded() kept doing the
            // off-screen pass state checks every frame.
            isRecord = false;
        }
    }

    // Computes the transform applied when rendering for the callback: center-crop
    // the source (width x height) into the callback size, then flip vertically
    // (MatrixUtils.flip with y=true) — presumably to compensate for GL's
    // bottom-up row order in glReadPixels; confirm against MatrixUtils.
    private void setFrameCallbackMatrix(){
       if(frameCallbackHeight>0 && frameCallbackWidth>0 && width>0 && height>0){
           MatrixUtils.getMatrix(frameMatrix, MatrixUtils.TYPE_CENTERCROP, width, height, frameCallbackWidth,frameCallbackHeight);
           MatrixUtils.flip(frameMatrix, false, true);
       }
    }

    /**
     * When a frame callback is registered and capture is active, re-renders the
     * scene into the off-screen FBO at the callback size and reads the pixels back.
     */
    private void callbackIfNeeded() {
        if (onFrameCallback != null && (isRecord || isShoot)) {
            // Off-screen pass: same pipeline as the on-screen draw, but the
            // viewport matches the callback size and the target is the FBO texture.
            GLES20.glViewport(0, 0, frameCallbackWidth, frameCallbackHeight);
            EasyGlUtils.bindFrameTexture(fFrame[0], fTexture[0]);
            // BUG FIX: the original called drawTexture(2) ("Y mirror"), but no such
            // overload exists in this class, so the code did not compile. Use the
            // no-arg draw; the intended Y flip for read-back is what frameMatrix
            // (see setFrameCallbackMatrix) encodes — TODO: wire it into the shader.
            drawTexture();
            // Read the pixels back and deliver them to the callback.
            frameCallback();
            // Restore the on-screen framebuffer.
            EasyGlUtils.unBindFrameBuffer();
        }
    }

    /**
     * Reads back the pixels of the currently bound framebuffer and hands them to
     * the registered callback.
     *
     * glReadPixels copies already-rendered pixels (possibly resident in GPU
     * memory) into client memory. With double buffering it reads from the
     * current read buffer, so reading the on-screen result normally requires a
     * swap first; here the read targets the off-screen FBO bound by
     * callbackIfNeeded(), so no swap is involved.
     *
     * The format/type pair must be supported by the implementation; GL_RGBA +
     * GL_UNSIGNED_BYTE is always supported, and additional pairs can be queried
     * via GL_IMPLEMENTATION_COLOR_READ_FORMAT / GL_IMPLEMENTATION_COLOR_READ_TYPE.
     *
     * NOTE(review): outPutBuffer entries are heap (non-direct) ByteBuffers and
     * their position is never rewound between frames — confirm glReadPixels
     * accepts this on the target devices. The timestamp passed to onFrame is
     * always 0, and indexOutput never changes, so only buffer 0 is ever used.
     */
    private void frameCallback(){
        GLES20.glReadPixels(0, 0, frameCallbackWidth, frameCallbackHeight, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, outPutBuffer[indexOutput]);
        onFrameCallback.onFrame(outPutBuffer[indexOutput].array(),0);
    }

    private int[] fFrame = new int[1];
    private int[] fRender = new int[1];
    private int[] fTexture = new int[1];

    /**
     * Deletes the off-screen framebuffer and its color texture.
     * NOTE(review): the renderbuffer ids in fRender are never deleted (the call
     * is commented out) — run() also never creates one, so confirm fRender is
     * intentionally unused.
     */
    private void deleteFrameBuffer() {
        //GLES20.glDeleteRenderbuffers(1, fRender, 0);
        GLES20.glDeleteFramebuffers(1, fFrame, 0);
        GLES20.glDeleteTextures(1, fTexture, 0);
    }
    /**
     * Creates the renderer and immediately starts its render thread.
     *
     * @param context must be the hosting TextureViewMediaActivity (cast below)
     * @param surface output surface the EGL window surface is created from
     * @param width   render target width in pixels
     * @param height  render target height in pixels
     */
    public SurfaceRenderer(Context context, Surface surface, int width, int height) {
        Log.e("TAG", "           SurfaceRenderer create       ");
        this.surface = surface;
        this.width = width;
        this.height = height;
        this.running = true;
        this.context = context;

        // NOTE(review): unchecked cast — any other Context type crashes here.
        instance = (TextureViewMediaActivity)context;
        videoTextureTransform = new float[16];

        Thread thread = new Thread(this); // render thread; executes run() below
        thread.start();
    }

    @Override
    public void run() {
        // Render-thread entry point: set up EGL and GL objects, then loop
        // drawing until running is cleared.
        initEGL();
        initGLComponents();

        deleteFrameBuffer();
        GLES20.glGenFramebuffers(1, fFrame, 0); // FBO for the callback's off-screen pass
        EasyGlUtils.genTexturesWithParameter(1, fTexture, 0, GLES20.GL_RGBA, width, height); // its color texture

        Log.d(LOG_TAG, "OpenGL init OK. start draw...");

        while (running) {
            if (draw()) {
                // Swap front/back buffers: eglSwapBuffers queues the finished
                // back buffer to the compositor (SurfaceFlinger) and dequeues a
                // fresh buffer to render the next frame into.
                egl.eglSwapBuffers(eglDisplay, eglSurface);
            }
        }

        deinitGLComponents();
        deinitEGL();
    }

    /**
     * Creates the EGL display/surface/context on the render thread and makes
     * the context current, so subsequent GLES calls on this thread have a target.
     */
    private void initEGL() {
        egl = (EGL10)EGLContext.getEGL();
        // Default display device.
        eglDisplay = egl.eglGetDisplay(EGL10.EGL_DEFAULT_DISPLAY);
        // Initialize EGL and fetch the major/minor version.
        int version[] = new int[2];
        egl.eglInitialize(eglDisplay, version);

        EGLConfig eglConfig = chooseEglConfig();

        // Wrap the caller-supplied Surface as a native EGL window surface.
        eglSurface = egl.eglCreateWindowSurface(eglDisplay, eglConfig, surface, null);

        eglContext = createContext(egl, eglDisplay, eglConfig);

        try {
            if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
                throw new RuntimeException("GL error:" + GLUtils.getEGLErrorString(egl.eglGetError()));
            }
            // Bind display + surface + context; once eglMakeCurrent succeeds this
            // thread can render with OpenGL onto the surface.
            if (!egl.eglMakeCurrent(eglDisplay, eglSurface, eglSurface, eglContext)) {
                throw new RuntimeException("GL Make current Error"+ GLUtils.getEGLErrorString(egl.eglGetError()));
            }
        }catch (Exception e) {
            // NOTE(review): failures are only logged; the draw loop will still run
            // without a current context — consider propagating instead.
            e.printStackTrace();
        }
    }

    /** Unbinds and destroys the EGL surface/context and terminates the display. */
    private void deinitEGL() {
        // Release the current binding before destroying anything.
        egl.eglMakeCurrent(eglDisplay, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_SURFACE, EGL10.EGL_NO_CONTEXT);
        egl.eglDestroySurface(eglDisplay, eglSurface);
        egl.eglDestroyContext(eglDisplay, eglContext);
        egl.eglTerminate(eglDisplay);
        Log.d(LOG_TAG, "OpenGL deinit OK.");
    }

    /**
     * Creates an OpenGL ES 2.0 rendering context on the given display/config.
     * The EGLContext carries the GL state machine for this thread.
     */
    private EGLContext createContext(EGL10 egl, EGLDisplay eglDisplay, EGLConfig eglConfig) {
        // Request client API version 2 (OpenGL ES 2.0).
        int[] contextAttribs = new int[] {
                EGL14.EGL_CONTEXT_CLIENT_VERSION, 2,
                EGL10.EGL_NONE
        };
        return egl.eglCreateContext(eglDisplay, eglConfig, EGL10.EGL_NO_CONTEXT, contextAttribs);
    }

    //
    /**
     * Asks EGL for a config matching getAttributes(); returns the first match,
     * or null when EGL succeeds but reports no matching config.
     */
    private EGLConfig chooseEglConfig() {
        EGLConfig[] matched = new EGLConfig[1];
        int[] matchedCount = new int[1];

        boolean ok = egl.eglChooseConfig(eglDisplay, getAttributes(), matched, 1, matchedCount);
        if (!ok) {
            throw new IllegalArgumentException("Failed to choose config:"+ GLUtils.getEGLErrorString(egl.eglGetError()));
        }
        return matchedCount[0] > 0 ? matched[0] : null;
    }

    // Desired EGLConfig attributes for the window surface.
    private int[] getAttributes()
    {
        return new int[] {
                EGL10.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT,  // OpenGL ES 2.0 renderable
                EGL10.EGL_RED_SIZE, 8, // 8 bits per RGBA channel
                EGL10.EGL_GREEN_SIZE, 8,
                EGL10.EGL_BLUE_SIZE, 8,
                EGL10.EGL_ALPHA_SIZE, 8,
                EGL10.EGL_DEPTH_SIZE, 0, // no depth or stencil buffer requested
                EGL10.EGL_STENCIL_SIZE, 0,
                EGL10.EGL_NONE      // terminator
        };
    }

    /** Stops the render loop; the render thread exits after the current frame. */
    public void onPause(){
        running = false;
    }

    /**
     * Last-resort stop of the render loop if the renderer is garbage-collected
     * while running. NOTE(review): finalizers are deprecated and unreliable;
     * callers should invoke onPause() explicitly instead.
     */
    @Override
    protected  void finalize() throws Throwable {
        try {
            running = false; // do the subclass work first
        } finally {
            super.finalize(); // always chain to the superclass finalizer
        }
    }

    public  int mColorFlag=0;
    public  int xyFlag=0;
    public  int   mRatio;
    public  float ratio=0.5f;
    public  int textureHandle;
    public  int textureIdOne;
    private int gHWidth;
    private int gHHeight;
    private float[] matrix=new float[16];
    private float[] matrix0=new float[16];
    private float[] mModelMatrix=new float[16];
    private float[] mModelMatrix0=new float[16];

    /**
     * Mirrors the given 4x4 matrix in place along the requested axes by scaling
     * with -1; returns the same array for chaining.
     *
     * @param m 4x4 column-major matrix (modified in place)
     * @param x true to mirror horizontally
     * @param y true to mirror vertically
     */
    public  float[] flip(float[] m,boolean x,boolean y){
        if (!x && !y) {
            return m; // nothing to mirror
        }
        Matrix.scaleM(m, 0, x ? -1 : 1, y ? -1 : 1, 1);
        return m;
    }

    /** Builds the two projection matrices: matrix is X-mirrored, matrix0 is Y-mirrored. */
    public void setSize(){
        Matrix.setIdentityM(mModelMatrix,0);
        Matrix.setIdentityM(mModelMatrix0,0);

        // flip() mutates its argument and returns it, so matrix/mModelMatrix alias.
        matrix = flip(mModelMatrix, true, false);
        matrix0 = flip(mModelMatrix0, false, true);
    }

    /**
     * Compiles and links the filter shader program, caches uniform/attribute
     * handles, uploads the viewport size, and builds the mirror matrices.
     */
    private void setupGraphics()
    {
        final String vertexShader = HelpUtils.readTextFileFromRawResource(context, R.raw.vetext_sharder);
        final String fragmentShader = HelpUtils.readTextFileFromRawResource(context, R.raw.fragment_sharder);

        final int vertexShaderHandle = HelpUtils.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
        final int fragmentShaderHandle = HelpUtils.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
        shaderProgram = HelpUtils.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
                new String[]{"texture","vPosition","vTexCoordinate","textureTransform"});

        GLES20.glUseProgram(shaderProgram);
        textureParamHandle = GLES20.glGetUniformLocation(shaderProgram, "texture"); // camera OES texture sampler
        textureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinate"); // per-vertex texture coords
        positionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); // per-vertex positions
        textureTranformHandle = GLES20.glGetUniformLocation(shaderProgram, "textureTransform");

        textureHandle = GLES20.glGetUniformLocation(shaderProgram, "texture0"); // overlay image sampler
        mRatio = GLES20.glGetUniformLocation(shaderProgram, "mratio"); // blend factor between the two textures

        gHWidth=GLES20.glGetUniformLocation(shaderProgram,"mWidth"); // viewport width/height uniforms
        gHHeight=GLES20.glGetUniformLocation(shaderProgram,"mHeight");

        // Program is already in use, so the uniforms can be set immediately.
        GLES20.glUniform1i(gHWidth,width);
        GLES20.glUniform1i(gHHeight,height);

        setSize();
    }

    /** Fills the direct NIO buffers for the draw-order indices and the quad vertices. */
    private void setupVertexBuffer()
    {
        // Index buffer: 2 bytes per short, native byte order.
        drawListBuffer = ByteBuffer.allocateDirect(drawOrder.length * 2)
                .order(ByteOrder.nativeOrder())
                .asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);

        // Vertex buffer: 4 bytes per float, native byte order.
        vertexBuffer = ByteBuffer.allocateDirect(squareCoords.length * 4)
                .order(ByteOrder.nativeOrder())
                .asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);
    }

    /**
     * Fills the texture-coordinate buffer, generates the OES texture that
     * receives camera frames, and wraps it in a SurfaceTexture.
     */
    private void setupTexture()
    {
        ByteBuffer texturebb = ByteBuffer.allocateDirect(textureCoords.length * 4);
        texturebb.order(ByteOrder.nativeOrder());  // native byte order
        textureBuffer = texturebb.asFloatBuffer();
        textureBuffer.put(textureCoords);
        textureBuffer.position(0);

        // Generate the actual texture
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // select texture unit 0
        GLES20.glGenTextures(1, textures, 0);
        checkGlError("Texture generate");

        // Bind as an external OES texture — the target SurfaceTexture streams into
        // (other targets include GL_TEXTURE_2D, GL_TEXTURE_3D, ...).
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);

        checkGlError("Texture bind");

        // SurfaceTexture backed by that texture id; incoming frames trigger
        // onFrameAvailable() below.
        videoTexture = new SurfaceTexture(textures[0]);
        videoTexture.setOnFrameAvailableListener(this);
    }

    /**
     * Creates a 2D texture, configures filtering/wrapping, and uploads the given
     * image resource into it.
     *
     * @param drawableId resource id of the image to load
     * @return the generated GL texture id
     * @throws IllegalStateException if the resource cannot be decoded to a bitmap
     */
    public int initTexture(int drawableId)
    {
        // Generate one texture id (renamed from "textures" to avoid shadowing the field).
        int[] textureIds = new int[1];
        GLES20.glGenTextures(1, textureIds, 0);
        int textureId = textureIds[0];
        Log.i(LOG_TAG, " initTexture textureId = " + textureId);

        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
        // GL_NEAREST when minified, GL_LINEAR when magnified.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
        // Clamp coordinates outside [0,1] to the edge texels.
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);

        // Decode the resource; always close the stream.
        Bitmap bitmapTmp;
        InputStream is = context.getResources().openRawResource(drawableId);
        try {
            bitmapTmp = BitmapFactory.decodeStream(is);
        } finally {
            try {
                is.close();
            }
            catch(IOException e) {
                e.printStackTrace();
            }
        }
        if (bitmapTmp == null) {
            // BUG FIX: decodeStream returns null on failure; fail fast with a clear
            // message instead of NPE-ing inside GLUtils.texImage2D.
            throw new IllegalStateException("Failed to decode resource " + drawableId);
        }
        // Upload to level 0 of the bound 2D texture (border must be 0).
        GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmapTmp, 0);
        bitmapTmp.recycle(); // GL owns the pixels now; free the Java-side bitmap
        return textureId;
    }
    /**
     * Renders one frame. Returns false (skipping the swap) when no new camera
     * frame has arrived since the last call.
     */
    protected boolean draw()
    {
        synchronized (this){
            if (frameAvailable) {
                videoTexture.updateTexImage(); // latch the newest frame into the OES texture
                videoTexture.getTransformMatrix(videoTextureTransform); // its texture-coordinate transform
                frameAvailable = false;
            }
            else{
                return false;
            }
        }
        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // color buffer only; depth is unused
        GLES20.glViewport(0, 0, width, height);
        drawTexture();

        callbackIfNeeded(); // off-screen pass + pixel read-back for the frame callback
        return true;
    }

    /**
     * Issues the actual draw call: binds the camera OES texture and the overlay
     * texture, uploads the per-frame uniforms, and renders the quad.
     */
    private void drawTexture() {
        // Per-frame uniform lookups. NOTE(review): these locations could be cached
        // once after linking, as setupGraphics() does for the other handles.
        int mHProjMatrix=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix");
        GLES20.glUniformMatrix4fv(mHProjMatrix,1,false,matrix,0);

        int mHProjMatrix0=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix0");
        GLES20.glUniformMatrix4fv(mHProjMatrix0,1,false,matrix0,0);

        // Mirror mode (x/y): selects which projection matrix multiplies vPosition.
        int mXyFlag = GLES20.glGetUniformLocation(shaderProgram, "xyFlag");
        GLES20.glUniform1i(mXyFlag, xyFlag);

        // Filter selector for the fragment shader (saturation/gray/warm/cool/...).
        int mColorFlagHandle = GLES20.glGetUniformLocation(shaderProgram, "colorFlag");
        GLES20.glUniform1i(mColorFlagHandle, mColorFlag);

        GLES20.glEnableVertexAttribArray(positionHandle);
        GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer);

        // BUG FIX: select the texture unit BEFORE binding. The original bound the
        // OES texture first and activated GL_TEXTURE0 afterwards; from the second
        // frame on the active unit was still GL_TEXTURE1 (set below), so the
        // camera texture was bound to the wrong unit.
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); // camera frame
        GLES20.glUniform1i(textureParamHandle, 0);

        GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdOne); // overlay/blend image
        GLES20.glUniform1i(textureHandle, 1);

        GLES20.glEnableVertexAttribArray(textureCoordinateHandle);
        GLES20.glVertexAttribPointer(textureCoordinateHandle, 4, GLES20.GL_FLOAT, false, 0, textureBuffer);

        GLES20.glUniformMatrix4fv(textureTranformHandle, 1, false, videoTextureTransform, 0); // OES texture transform
        GLES20.glUniform1f(mRatio, ratio); // blend factor between the two textures

        GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
        GLES20.glDisableVertexAttribArray(positionHandle);
        GLES20.glDisableVertexAttribArray(textureCoordinateHandle);
    }

    /**
     * One-time GL setup on the render thread: buffers, camera texture, shader
     * program, and the overlay texture; then notifies the activity (message
     * code 1) that the SurfaceTexture is ready.
     */
    protected void initGLComponents() {
        setupVertexBuffer();
        setupTexture();
        setupGraphics();
        textureIdOne = initTexture(R.drawable.bg);

        Message message = new Message();
        message.what = 1;
        instance.myHandler.sendMessage(message);
    }

    /**
     * Releases the GL resources owned by the renderer; runs on the render
     * thread after the draw loop exits.
     */
    protected void deinitGLComponents() {
        GLES20.glDeleteTextures(1, textures, 0);
        GLES20.glDeleteProgram(shaderProgram);
        // BUG FIX: detach the listener BEFORE releasing. The original released
        // first, leaving a window where a late frame notification could arrive
        // on an already-released SurfaceTexture.
        videoTexture.setOnFrameAvailableListener(null);
        videoTexture.release();
    }

    /**
     * Drains and logs every pending GL error after the operation named by
     * {@code op}.
     *
     * BUG FIX: the original formatted the code with GLUtils.getEGLErrorString(),
     * which names EGL error codes, not GL error codes — so the logged names were
     * wrong. Log the raw hex value instead.
     */
    public void checkGlError(String op) {
        int error;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            Log.e("SurfaceTest", op + ": glError 0x" + Integer.toHexString(error));
        }
    }

    /**
     * Returns the SurfaceTexture backed by the OES texture (null until the
     * render thread has run setupTexture()).
     */
    public SurfaceTexture getVideoTexture() {
        return videoTexture;
    }

    /**
     * SurfaceTexture callback (invoked off the render thread): flags that a new
     * frame is ready; draw() consumes the flag under the same lock.
     */
    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        synchronized (this){
            frameAvailable = true;
        }
    }
}

設定camera2 預覽、MediaCodec/MediaMuxer 視訊編碼設定

package com.vr.jarry.playvideo_texuture;

import android.Manifest;
import android.app.Activity;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.CaptureResult;
import android.hardware.camera2.TotalCaptureResult;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Environment;
import android.os.Handler;
import android.os.Message;
import android.support.v4.app.ActivityCompat;
import android.os.Bundle;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.TextureView;
import android.view.View;
import android.view.Window;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.SeekBar;
import android.widget.Toast;

import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;

import com.vr.jarry.playvideo_texuture.SurfaceRenderer.OnFrameCallback;

public class TextureViewMediaActivity extends Activity implements OnFrameCallback, TextureView.SurfaceTextureListener{
    private static final String TAG = "GLViewMediaActivity";
    private boolean clickFlag = false;
    public static final String videoPath = Environment.getExternalStorageDirectory()+"/live.mp4";

    private SurfaceRenderer videoRenderer;
    private Button btn_shutter, btn_mirror, btn_color;
    ImageView imagView;

    Surface mEncoderSurface;
    BufferedOutputStream outputStream;
    private MediaCodec mCodec, mDecodec;
    boolean isEncode = false;
    private MediaMuxer mMuxer;
    TextureView mPreviewView;
    CameraCaptureSession mSession;
    CaptureRequest.Builder mPreviewBuilder;
    public CameraDevice mCameraDevice;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.activity_main_0);

        mPreviewView = (TextureView) findViewById(R.id.id_textureview);
        mPreviewView.setSurfaceTextureListener(this);

        imagView = (ImageView) findViewById(R.id.id_textureview0);

        SeekBar seekBar = (SeekBar) findViewById(R.id.id_seekBar);
        seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
            @Override
            public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
                // TODO Auto-generated method stub
                if(videoRenderer != null) {
                    videoRenderer.ratio = progress/100.0f;
                }
            }

            @Override
            public void onStartTrackingTouch(SeekBar seekBar) {
                // TODO Auto-generated method stub

            }

            @Override
            public void onStopTrackingTouch(SeekBar seekBar) {
                // TODO Auto-generated method stub

            }
        });

        btn_color = (Button) findViewById(R.id.btn_color);
        btn_shutter = (Button) findViewById(R.id.btn_shutter);
        btn_mirror = (Button) findViewById(R.id.btn_mirror);
        Button btn_play = (Button) findViewById(R.id.btn_play);

        btn_play.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                File f = new File(mOutputPath); 
                if(f.exists() && mVideoTrack==-1){
                   Log.e(TAG, "       play video     ");
                   Intent intent = new Intent(Intent.ACTION_VIEW);
                   //intent.setDataAndType(Uri.parse(mOutputPath), "video/mp4");
                   intent.setDataAndType(Uri.parse(Environment.getExternalStorageDirectory().getAbsolutePath()+"/mcodecmux26.mp4"), "video/mp4");
                   startActivity(intent);
                }else {
                   Log.e(TAG, "       can not play video     ");
                   if(!f.exists()) {
                       Toast.makeText(TextureViewMediaActivity.this, "Video file not exists!", Toast.LENGTH_SHORT).show();
                   }else {
                       if(mVideoTrack != -1) {  
                          Toast.makeText(TextureViewMediaActivity.this, "Video record not stop!", Toast.LENGTH_SHORT).show();
                       }
                   }
                }
            }
        });

        btn_shutter.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                clickFlag = !clickFlag;
                if(clickFlag) {
                    if(cameraFlag) {
                        Toast.makeText(TextureViewMediaActivity.this, "Start Record!", Toast.LENGTH_SHORT).show();
                        btn_shutter.setText("Stop");

                        try {
                            cameraManager.openCamera(CameraIdList[0], mCameraDeviceStateCallback, null);
                        } catch (CameraAccessException e) {
                            // TODO Auto-generated catch block
                            e.printStackTrace();
                        }

                        startCodec();
                    }else {
                        Toast.makeText(TextureViewMediaActivity.this, "No camera permission!", Toast.LENGTH_SHORT).show();
                    }
                }else {

                    btn_shutter.setText("Start");

                    videoRenderer.running = false;
                    try {
                        videoRenderer.join();
                        Log.e(TAG, "       videoRenderer stop     ");
                    } catch (InterruptedException e) {
                         // TODO Auto-generated catch block
                         e.printStackTrace();
                    }

                    if (mCameraDevice != null) {
                        mCameraDevice.close();
                        mCameraDevice = null;
                    }

                    stopCodec();

                    Toast.makeText(TextureViewMediaActivity.this, "Stop Record!", Toast.LENGTH_SHORT).show();
                    /*
                    try {
                        mSession.stopRepeating();
                        mPreviewBuilder.removeTarget(surface);
                        mPreviewBuilder.removeTarget(surface0);
                        surface.release();
                        surface0.release();
                        surface  = null;
                        surface0 = null;        
                        mSession.close();
                        Log.e(TAG, "       mSession stop     ");
                    } catch (CameraAccessException e) {
                        // TODO Auto-generated catch block
                        e.printStackTrace();
                    }
                    */
                }
            }
        });

        btn_color.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                // TODO Auto-generated method stub
                if(videoRenderer != null) {
                    if(videoRenderer.mColorFlag == 0) {
                        videoRenderer.mColorFlag = 7;
                        Toast.makeText(TextureViewMediaActivity.this, "Saturation adjust!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 7) {
                        videoRenderer.mColorFlag = 1;
                        Toast.makeText(TextureViewMediaActivity.this, "Gray Color!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 1) {
                        videoRenderer.mColorFlag = 2;
                        Toast.makeText(TextureViewMediaActivity.this, "Warm Color!", Toast.LENGTH_SHORT).show();
                    }else if(videoRenderer.mColorFlag == 2){
                        videoRenderer.mColorFlag = 3;
                        Toast.makeText(TextureViewMediaActivity.this, "Cool Color!", Toast.LENGTH_SHORT).show();