1. 程式人生 > >Android Camera2 Opengles2.0 實時濾鏡(冷暖色/放大鏡/模糊/美顏)

Android Camera2 Opengles2.0 實時濾鏡(冷暖色/放大鏡/模糊/美顏)

https://blog.csdn.net/keen_zuxwang/article/details/78363464

demo:
http://download.csdn.net/download/keen_zuxwang/10041423

1、建立頂點位置、紋理陣列
2、建立、編譯、載入shader程式,獲得shader中各變數的控制代碼(如獲取紋理取樣sampler2D變數的控制代碼)
3、程式通過program給shader傳遞各參量,如:頂點位置、紋理座標,啟用、繫結紋理,傳遞模型/檢視/投影矩陣等, 然後通過glDrawArrays()/glDrawElements()繪製圖元(片元著色器通過這些參量計算出每個畫素的值、然後通過底層EGL 渲染到相應的ANativeWindow)

camera2 攝像頭影象opengles渲染顯示:
1、生成紋理
GLES20.glGenTextures(1, textures, 0); // 產生紋理id
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);//繫結 紋理id
2、建立SurfaceTexture
SurfaceTexture videoTexture = new SurfaceTexture(textures[0]); //通過建立的紋理id,生成SurfaceTexture
3、生成Surface
Surface surface0 = new Surface(videoTexture); // 通過建立的SurfaceTexture,生成Surface
4、新增camera2預覽輸出Surface,從而實現camera影象 -> Surface
mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW); // 建立camera2 捕獲請求,預覽模式
//新增預覽輸出的Surface, 從而實現camera影象 -> Surface
mPreviewBuilder.addTarget(surface);
mPreviewBuilder.addTarget(surface0);
camera.createCaptureSession(Arrays.asList(surface, surface0), mSessionStateCallback, null); // 建立捕獲會話

所以整個攝像頭影象渲染流程:
camera影象 -> Surface -> videoTexture/videoTexture.updateTexImage() -> GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]) ->
GLES20.glDrawElements()

1、vertex shader

attribute vec4 vPosition;      // vertex position (NDC quad corner)
attribute vec4 vTexCoordinate; // vertex texture coordinate
uniform mat4 textureTransform; // SurfaceTexture transform matrix
uniform mat4 uProjMatrix;      // X-mirror transform
uniform mat4 uProjMatrix0;     // Y-mirror transform
uniform int xyFlag;            // mirror mode: 0 = none, 1 = X mirror, 2 = Y mirror

// Passed on to the fragment shader.
varying vec2 v_TexCoordinate;
varying vec4 gPosition;
varying vec2 varyPostion; // NOTE: spelling kept ("Postion") to match the fragment shader

void main () {
    v_TexCoordinate = (textureTransform * vTexCoordinate).xy;

    // Pick the transform for the requested mirror mode. Fixed: unknown values
    // of xyFlag now fall back to the untransformed position instead of leaving
    // gl_Position undefined (the original only wrote it for xyFlag 0/1/2).
    vec4 pos = vPosition;
    if (xyFlag == 1) {
        pos = uProjMatrix * vPosition;   // left-multiply: X mirror
    } else if (xyFlag == 2) {
        pos = uProjMatrix0 * vPosition;  // left-multiply: Y mirror
    }

    gl_Position = pos;
    gPosition = pos;
    varyPostion = vPosition.xy;
}
2、fragment shader

#extension GL_OES_EGL_image_external : require // external (OES) image texture
precision mediump float;
uniform samplerExternalOES texture; // camera-frame sampler (external OES texture)
uniform sampler2D texture0; // still-image (overlay) texture sampler
uniform int colorFlag; // filter type selector (see main)
uniform float mratio; // texture blend factor
// NOTE(review): mWidth/mHeight are consts here, but the Java side queries them
// with glGetUniformLocation -- those lookups return -1 and the matching
// glUniform1i calls are silently ignored. Confirm intent.
const highp float mWidth=640.0;
const highp float mHeight=480.0;
const highp vec3 W = vec3(0.299,0.587,0.114); // BT.601 luma weights
const mediump vec3 luminanceWeighting = vec3(0.2125, 0.7154, 0.0721); // luminance weights; the three values sum to 1 and green carries the largest share for a better result
const lowp float saturation=0.5;
const highp float radius = 1.41; // glass-sphere radius (filter 8)
const highp vec2 center = vec2(0.5, 0.5); // glass-sphere center in texture space
const highp float refractiveIndex=0.5;
// rectangular blend region, NDC coordinates (filter 0)
const vec2 leftBottom = vec2(-1.0, 0.40);
const vec2 rightTop = vec2(-0.40, 1.0);
// scratch array of blur sample coordinates (filter 6)
vec2 blurCoordinates[24];

// inputs interpolated from the vertex shader
varying vec4 gPosition;
varying vec2 v_TexCoordinate;
varying vec2 varyPostion;

// Photoshop-style "hard light" on a single channel: values at or below 0.5
// are darkened (2c^2), values above 0.5 are brightened (1 - 2(1-c)^2).
float hardLight(float color)
{
    if (color <= 0.5) {
        return color * color * 2.0;
    }
    return 1.0 - ((1.0 - color) * (1.0 - color) * 2.0);
}

// Clamp every channel of `color` to [0.0, 1.0].
// Fixed: the parameter must be `inout` -- GLSL function parameters default to
// `in` (pass-by-value), so the original clamping never reached the caller's
// variable and the function was a no-op.
void modifyColor(inout vec4 color){
    color.r = clamp(color.r, 0.0, 1.0);
    color.g = clamp(color.g, 0.0, 1.0);
    color.b = clamp(color.b, 0.0, 1.0);
    color.a = clamp(color.a, 0.0, 1.0);
}

// Filter dispatcher. colorFlag selects:
//   0: rectangle blend demo   1: grayscale   2: warm tone   3: cool tone
//   4: magnifier   5: simple blur   6: beauty/skin smoothing
//   7: saturation  8: glass-sphere refraction
void main(){
if(colorFlag==0){
// Inside a fixed NDC rectangle, mratio picks warm / magnify / blend.
if (varyPostion.x >= leftBottom.x && varyPostion.x <= rightTop.x
&& varyPostion.y >= leftBottom.y && varyPostion.y <= rightTop.y) {
if(mratio < 0.0000001){ // slider at 0: warm tone
vec4 color = texture2D(texture, v_TexCoordinate);
vec4 deltaColor = color + vec4(0.1, 0.1, 0.0, 0.0); // warm offset
modifyColor(deltaColor);
gl_FragColor=deltaColor;
}else if(mratio > 0.99){ // slider at max: magnifier
gl_FragColor= texture2D(texture, vec2(v_TexCoordinate.x/2.0+0.25, v_TexCoordinate.y/2.0+0.25)); //nColor;
}else{ // in between: blend still texture into the camera frame
// map the rectangle into [0,1] texture space of texture0 (flipped in y)
vec2 tex0 = vec2((varyPostion.x-leftBottom.x)/(rightTop.x-leftBottom.x),
1.0-(varyPostion.y-leftBottom.y)/(rightTop.y-leftBottom.y));
vec4 color = texture2D(texture0, tex0);
gl_FragColor = color*mratio + texture2D(texture,v_TexCoordinate)*(1.0-mratio); //1.0-v_TexCoordinate
//gl_FragColor = texture2D(texture, 1.0-v_TexCoordinate);
}
}else{
// outside the rectangle: camera frame passes through unfiltered
//vec4 color1 = texture2D(texture, v_TexCoordinate);
//vec4 color2 = texture2D(texture0, v_TexCoordinate);//vec2(v_TexCoordinate.s/10, v_TexCoordinate.t/10));
// gl_FragColor = mix(color1, color2, mratio);
gl_FragColor = texture2D(texture, v_TexCoordinate);
}
}
else if(colorFlag==7){
// Saturation: luminance weights sum to 1; green has the largest share.
vec4 textureColor = texture2D(texture, v_TexCoordinate);
float luminance = dot(textureColor.rgb, luminanceWeighting); // per-pixel luminance via dot product
vec3 greyScaleColor = vec3(luminance);
gl_FragColor = vec4(mix(greyScaleColor, textureColor.rgb, saturation), textureColor.w); // lerp grey -> original by `saturation`
}
else if(colorFlag==8){
// Glass-sphere refraction.
float aspectRatio = mWidth/mHeight;
vec2 textureCoordinateToUse = vec2(v_TexCoordinate.x, (v_TexCoordinate.y * aspectRatio + 0.5 - 0.5 * aspectRatio));
// Aspect-corrected coordinate: normalized space must account for the surface not being square.
float distanceFromCenter = distance(center, textureCoordinateToUse); //center
// distance() gives how far this fragment is from the sphere center.
float checkForPresenceWithinSphere = step(distanceFromCenter, radius); // 1.0 inside the sphere, 0.0 outside
distanceFromCenter = distanceFromCenter / radius; // normalize the distance to the sphere radius
float normalizedDepth = radius * sqrt(1.0 - distanceFromCenter * distanceFromCenter); // simulated "depth" of the glass sphere at this point
vec3 sphereNormal = normalize(vec3(textureCoordinateToUse - center, normalizedDepth)); // surface normal
vec3 refractedVector = refract(vec3(0.0, 0.0, -1.0), sphereNormal, refractiveIndex);
// refract() bends a straight-down ray at the sphere surface using the normal and refractive index.

gl_FragColor = texture2D(texture, (refractedVector.xy + 1.0) * 0.5) * checkForPresenceWithinSphere; // sample along the refracted ray; black outside the sphere
}
else if(colorFlag==1){ // grayscale: write the luma value to all RGB channels
vec4 color = texture2D(texture, v_TexCoordinate);
float fGrayColor = (0.3*color.r + 0.59*color.g + 0.11*color.b); // luma
gl_FragColor = vec4(fGrayColor, fGrayColor, fGrayColor, 1.0);
}
else if(colorFlag==2){ // warm tone
vec4 color = texture2D(texture, v_TexCoordinate);
vec4 deltaColor = color + vec4(0.1, 0.1, 0.0, 0.0); // boost red + green
modifyColor(deltaColor);
gl_FragColor=deltaColor;
}
else if(colorFlag==3){ // cool tone (boost blue)
vec4 color = texture2D(texture, v_TexCoordinate);
vec4 deltaColor = color + vec4(0.0, 0.0, 0.1, 0.0); //vec4(0.006, 0.004, 0.002, 0.0); // blue offset
modifyColor(deltaColor);
gl_FragColor=deltaColor;
}
else if(colorFlag==4){ // magnifier
vec4 nColor=texture2D(texture, v_TexCoordinate);
float uXY = mWidth/mHeight;
vec2 vChange = vec2(0.0, 0.0); // circle center in aspect-corrected clip space
float dis = distance(vec2(gPosition.x, gPosition.y/uXY), vChange);
if(dis < 0.5){ // inside the circle: sample a half-size window (2x zoom)
nColor=texture2D(texture,vec2(v_TexCoordinate.x/2.0+0.25, v_TexCoordinate.y/2.0+0.25));
}
gl_FragColor=nColor;
}
else if(colorFlag==5){ // simple blur (box-like average)
vec4 nColor=texture2D(texture, v_TexCoordinate);
vec3 vChangeColor = vec3(0.025, 0.025, 0.025); // sample offsets
// NOTE(review): the .r/.g/.b offsets are all 0.025, so the 12 taps below hit
// only 4 distinct positions -- presumably they were meant to differ; confirm.

// average the surrounding texels
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.r,v_TexCoordinate.y-vChangeColor.r));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.r,v_TexCoordinate.y+vChangeColor.r));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.r,v_TexCoordinate.y-vChangeColor.r));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.r,v_TexCoordinate.y+vChangeColor.r));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.g,v_TexCoordinate.y-vChangeColor.g));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.g,v_TexCoordinate.y+vChangeColor.g));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.g,v_TexCoordinate.y-vChangeColor.g));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.g,v_TexCoordinate.y+vChangeColor.g));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.b,v_TexCoordinate.y-vChangeColor.b));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x-vChangeColor.b,v_TexCoordinate.y+vChangeColor.b));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.b,v_TexCoordinate.y-vChangeColor.b));
nColor+=texture2D(texture,vec2(v_TexCoordinate.x+vChangeColor.b,v_TexCoordinate.y+vChangeColor.b));
nColor/=13.0; // center + 12 taps
gl_FragColor=nColor;
}
else if(colorFlag==6)
{
// Beauty / skin smoothing: high-pass the green channel against a weighted
// blur, sharpen it with repeated hard-light, then pull the color toward a
// smoothed version.
float mul_x = 2.0 / mWidth;
float mul_y = 2.0 / mHeight;
float pParams = 0.0; // NOTE(review): pow(luminance, 0.0) == 1.0, so alpha below is always 1 -- confirm intent
vec2 pStepOffset = vec2(mul_x, mul_y);
vec3 centralColor = texture2D(texture, v_TexCoordinate).rgb;

// 24 blur taps at three rings (weights 1 / 2 / 3, see sums below)
blurCoordinates[0] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, -10.0);
blurCoordinates[1] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, 10.0);
blurCoordinates[2] = v_TexCoordinate.xy + pStepOffset * vec2(-10.0, 0.0);
blurCoordinates[3] = v_TexCoordinate.xy + pStepOffset * vec2(10.0, 0.0);
blurCoordinates[4] = v_TexCoordinate.xy + pStepOffset * vec2(5.0, -8.0);
blurCoordinates[5] = v_TexCoordinate.xy + pStepOffset * vec2(5.0, 8.0);
blurCoordinates[6] = v_TexCoordinate.xy + pStepOffset * vec2(-5.0, 8.0);
blurCoordinates[7] = v_TexCoordinate.xy + pStepOffset * vec2(-5.0, -8.0);
blurCoordinates[8] = v_TexCoordinate.xy + pStepOffset * vec2(8.0, -5.0);
blurCoordinates[9] = v_TexCoordinate.xy + pStepOffset * vec2(8.0, 5.0);
blurCoordinates[10] = v_TexCoordinate.xy + pStepOffset * vec2(-8.0, 5.0);
blurCoordinates[11] = v_TexCoordinate.xy + pStepOffset * vec2(-8.0, -5.0);
blurCoordinates[12] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, -6.0);
blurCoordinates[13] = v_TexCoordinate.xy + pStepOffset * vec2(0.0, 6.0);
blurCoordinates[14] = v_TexCoordinate.xy + pStepOffset * vec2(6.0, 0.0);
blurCoordinates[15] = v_TexCoordinate.xy + pStepOffset * vec2(-6.0, 0.0);
blurCoordinates[16] = v_TexCoordinate.xy + pStepOffset * vec2(-4.0, -4.0);
blurCoordinates[17] = v_TexCoordinate.xy + pStepOffset * vec2(-4.0, 4.0);
blurCoordinates[18] = v_TexCoordinate.xy + pStepOffset * vec2(4.0, -4.0);
blurCoordinates[19] = v_TexCoordinate.xy + pStepOffset * vec2(4.0, 4.0);
blurCoordinates[20] = v_TexCoordinate.xy + pStepOffset * vec2(-2.0, -2.0);
blurCoordinates[21] = v_TexCoordinate.xy + pStepOffset * vec2(-2.0, 2.0);
blurCoordinates[22] = v_TexCoordinate.xy + pStepOffset * vec2(2.0, -2.0);
blurCoordinates[23] = v_TexCoordinate.xy + pStepOffset * vec2(2.0, 2.0);

float sampleColor = centralColor.g * 22.0;
sampleColor += texture2D(texture, blurCoordinates[0]).g;
sampleColor += texture2D(texture, blurCoordinates[1]).g;
sampleColor += texture2D(texture, blurCoordinates[2]).g;
sampleColor += texture2D(texture, blurCoordinates[3]).g;
sampleColor += texture2D(texture, blurCoordinates[4]).g;
sampleColor += texture2D(texture, blurCoordinates[5]).g;
sampleColor += texture2D(texture, blurCoordinates[6]).g;
sampleColor += texture2D(texture, blurCoordinates[7]).g;
sampleColor += texture2D(texture, blurCoordinates[8]).g;
sampleColor += texture2D(texture, blurCoordinates[9]).g;
sampleColor += texture2D(texture, blurCoordinates[10]).g;
sampleColor += texture2D(texture, blurCoordinates[11]).g;
sampleColor += texture2D(texture, blurCoordinates[12]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[13]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[14]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[15]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[16]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[17]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[18]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[19]).g * 2.0;
sampleColor += texture2D(texture, blurCoordinates[20]).g * 3.0;
sampleColor += texture2D(texture, blurCoordinates[21]).g * 3.0;
sampleColor += texture2D(texture, blurCoordinates[22]).g * 3.0;
sampleColor += texture2D(texture, blurCoordinates[23]).g * 3.0;

sampleColor = sampleColor / 62.0; // total weight: 22 + 12*1 + 8*2 + 4*3 = 62

float highPass = centralColor.g - sampleColor + 0.5; // green-channel high-pass, biased around 0.5

// sharpen the high-pass with five hard-light passes
for(int i = 0; i < 5;i++)
{
highPass = hardLight(highPass);
}
float luminance = dot(centralColor, W); // BT.601 luma

float alpha = pow(luminance, pParams); // smoothing strength (see pParams note above)

vec3 smoothColor = centralColor + (centralColor-vec3(highPass))*alpha*0.1;

gl_FragColor = vec4(mix(smoothColor.rgb, max(smoothColor, centralColor), alpha), 1.0);
}
}
3、shader工具類

/**
 * Static helpers for compiling GLSL shaders, linking GL programs, and loading
 * texture/shader assets on Android (GLES 2.0).
 */
public class HelpUtils
{
    private static final String TAG = "ShaderHelper";

    /**
     * Compiles a GLSL shader.
     *
     * @param shaderType   GLES20.GL_VERTEX_SHADER or GLES20.GL_FRAGMENT_SHADER
     * @param shaderSource shader source text
     * @return a non-zero shader handle
     * @throws RuntimeException if creation or compilation fails
     */
    public static int compileShader(final int shaderType, final String shaderSource)
    {
        int shaderHandle = GLES20.glCreateShader(shaderType);
        if (shaderHandle != 0)
        {
            // Pass in the shader source and compile it.
            GLES20.glShaderSource(shaderHandle, shaderSource);
            GLES20.glCompileShader(shaderHandle);

            // If the compilation failed, log the info log and delete the shader.
            final int[] compileStatus = new int[1];
            GLES20.glGetShaderiv(shaderHandle, GLES20.GL_COMPILE_STATUS, compileStatus, 0);
            if (compileStatus[0] == 0)
            {
                Log.e(TAG, "Error compiling shader: " + GLES20.glGetShaderInfoLog(shaderHandle));
                GLES20.glDeleteShader(shaderHandle);
                shaderHandle = 0;
            }
        }
        if (shaderHandle == 0){
            throw new RuntimeException("Error creating shader.");
        }
        return shaderHandle;
    }

    /**
     * Links a vertex and fragment shader into a program, binding the given
     * attribute names to locations 0..n-1 before linking.
     *
     * @param vertexShaderHandle   compiled vertex shader
     * @param fragmentShaderHandle compiled fragment shader
     * @param attributes           attribute names to bind, or null
     * @return a non-zero program handle
     * @throws RuntimeException if creation or linking fails
     */
    public static int createAndLinkProgram(final int vertexShaderHandle, final int fragmentShaderHandle, final String[] attributes)
    {
        int programHandle = GLES20.glCreateProgram();
        if (programHandle != 0)
        {
            // Attach both shaders to the program.
            GLES20.glAttachShader(programHandle, vertexShaderHandle);
            GLES20.glAttachShader(programHandle, fragmentShaderHandle);

            // Bind attribute locations before linking.
            if (attributes != null){
                final int size = attributes.length;
                for (int i = 0; i < size; i++){
                    GLES20.glBindAttribLocation(programHandle, i, attributes[i]);
                }
            }

            // Link, then check the link status; delete the program on failure.
            GLES20.glLinkProgram(programHandle);
            final int[] linkStatus = new int[1];
            GLES20.glGetProgramiv(programHandle, GLES20.GL_LINK_STATUS, linkStatus, 0);
            if (linkStatus[0] == 0) {
                Log.e(TAG, "Error compiling program: " + GLES20.glGetProgramInfoLog(programHandle));
                GLES20.glDeleteProgram(programHandle);
                programHandle = 0;
            }
        }
        if (programHandle == 0){
            throw new RuntimeException("Error creating program.");
        }
        return programHandle;
    }

    /**
     * Loads a drawable resource into a new GL_TEXTURE_2D texture (nearest
     * filtering, no pre-scaling).
     *
     * @return the non-zero texture id
     * @throws RuntimeException if texture creation or bitmap decoding fails
     */
    public static int loadTexture(final Context context, final int resourceId) {
        final int[] textureHandle = new int[1];
        GLES20.glGenTextures(1, textureHandle, 0);
        if (textureHandle[0] != 0) {
            final BitmapFactory.Options options = new BitmapFactory.Options();
            options.inScaled = false; // No pre-scaling

            // Read in the resource.
            final Bitmap bitmap = BitmapFactory.decodeResource(
                    context.getResources(), resourceId, options);
            // Fixed: a failed decode used to surface as an NPE inside
            // GLUtils.texImage2D; fail explicitly and release the texture.
            if (bitmap == null) {
                GLES20.glDeleteTextures(1, textureHandle, 0);
                throw new RuntimeException("Error decoding texture resource.");
            }

            // Bind the texture and set nearest filtering.
            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D,
                    GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);

            // Load the bitmap into the bound texture, then free the bitmap:
            // its pixel data now lives in GL.
            GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmap, 0);
            bitmap.recycle();
        }

        if (textureHandle[0] == 0) {
            throw new RuntimeException("Error loading texture.");
        }
        return textureHandle[0];
    }

    /**
     * Reads a raw text resource (e.g. a shader) into a String.
     *
     * @return the file contents, or null on I/O error (preserved behavior)
     */
    public static String readTextFileFromRawResource(final Context context,
                                                     final int resourceId)
    {
        // Fixed: the original never closed any of the three streams
        // (resource leak); try-with-resources closes them on every path.
        try (BufferedReader bufferedReader = new BufferedReader(
                new InputStreamReader(context.getResources().openRawResource(resourceId)))) {
            String nextLine;
            final StringBuilder body = new StringBuilder();
            while ((nextLine = bufferedReader.readLine()) != null){
                body.append(nextLine);
                body.append('\n');
            }
            return body.toString();
        }
        catch (IOException e){
            return null;
        }
    }

    /**
     * Loads a shader script from the assets folder as UTF-8 text, normalizing
     * CRLF line endings to LF.
     *
     * @return the file contents, or null on error (preserved behavior)
     */
    public static String loadFromAssetsFile(String fname,Resources r)
    {
        String result=null;
        // Fixed: streams are now closed on all paths, and the copy uses a
        // buffer instead of the original byte-at-a-time read loop.
        try (InputStream in = r.getAssets().open(fname);
             ByteArrayOutputStream baos = new ByteArrayOutputStream()) {
            byte[] chunk = new byte[4096];
            int read;
            while((read = in.read(chunk)) != -1) {
                baos.write(chunk, 0, read);
            }
            result = new String(baos.toByteArray(), "UTF-8");
            result = result.replaceAll("\\r\\n","\n");
        }
        catch(Exception e){
            e.printStackTrace();
        }
        return result;
    }
}
4、GLViewMediaActivity類, shader操作類,實現GLSurfaceView.Renderer介面
通過建立的SurfaceTexture videoTexture(textures[0]生成的),生成Surface,所以整個攝像頭影象渲染流程:
camera影象 -> Surface -> videoTexture/videoTexture.updateTexImage() -> GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]) ->
GLES20.glDrawElements()

public class GLViewMediaActivity extends Activity implements TextureView.SurfaceTextureListener, GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
public static final String videoPath = Environment.getExternalStorageDirectory()+"/live.mp4"; // NOTE(review): not referenced in the code shown -- confirm it is still needed
public static final String TAG = "GLViewMediaActivity";
// Full-screen quad corners (x, y) in NDC.
private static float squareCoords[] = {
-1.0f, 1.0f, // top left
-1.0f, -1.0f, // bottom left
1.0f, -1.0f, // bottom right
1.0f, 1.0f // top right
};
private static short drawOrder[] = {0, 1, 2, 0, 2, 3}; // quad as two triangles; texture shown in background
// Per-vertex texture coordinates (s, t, 0, 1).
private float textureCoords[] = {
0.0f, 1.0f, 0.0f, 1.0f,
0.0f, 0.0f, 0.0f, 1.0f,
1.0f, 0.0f, 0.0f, 1.0f,
1.0f, 1.0f, 0.0f, 1.0f
};

private int[] textures = new int[1]; // id of the camera OES texture
private int width, height; // GL surface size (set in onSurfaceChanged)
private int shaderProgram; // linked GL program
private FloatBuffer vertexBuffer; // quad vertex positions
private FloatBuffer textureBuffer; // quad texture coordinates
private ShortBuffer drawListBuffer; // quad indices
private float[] videoTextureTransform = new float[16]; // SurfaceTexture transform matrix
private SurfaceTexture videoTexture; // receives camera frames (backed by textures[0])
private GLSurfaceView glView; // filtered GL output view

private Context context;
private RelativeLayout previewLayout = null;

private boolean frameAvailable = false; // set by onFrameAvailable, consumed in onDrawFrame
int textureParamHandle; // uniform "texture" (camera OES sampler)
int textureCoordinateHandle; // attribute "vTexCoordinate"
int positionHandle; // attribute "vPosition"
int textureTranformHandle; // uniform "textureTransform"

public int mRatio; // uniform handle "mratio"
public float ratio=0.5f; // blend factor value, driven by the SeekBar
public int mColorFlag=0; // current filter type (shader colorFlag)
public int xyFlag=0; // current mirror mode (shader xyFlag)
TextureView mPreviewView; // plain (unfiltered) camera preview
CameraCaptureSession mSession;
CaptureRequest.Builder mPreviewBuilder;

// Builds the UI: a GLSurfaceView (filtered preview) added on top of a
// TextureView (raw preview), a SeekBar driving the blend factor, and two
// buttons cycling the filter and mirror modes.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main_1);
context = this;
glView = new GLSurfaceView(this);

previewLayout = (RelativeLayout)findViewById(R.id.previewLayout);
//RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(640,480 );
previewLayout.addView(glView);//, layoutParams);
mPreviewView = (TextureView) findViewById(R.id.id_textureview);
mPreviewView.setSurfaceTextureListener(this);

// GLES 2.0 context; this activity is the renderer (continuous render mode).
glView.setEGLContextClientVersion(2);
glView.setRenderer(this);
//glView.setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);

SeekBar seekBar = (SeekBar) findViewById(R.id.id_seekBar);
seekBar.setOnSeekBarChangeListener(new SeekBar.OnSeekBarChangeListener() {
@Override
public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) {
// Blend factor between the still texture and the camera OES texture.
ratio = progress/100.0f;
}

@Override
public void onStartTrackingTouch(SeekBar seekBar) {
// no-op

}

@Override
public void onStopTrackingTouch(SeekBar seekBar) {
// no-op

}
});

Button btn_color = (Button)findViewById(R.id.btn_color);
Button btn_mirror = (Button)findViewById(R.id.btn_mirror);

btn_color.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Cycle the filter type: 0 -> 7 -> 1 -> 2 -> 3 -> 4 -> 5 -> 6 -> 0.
if(mColorFlag == 0) {
mColorFlag = 7;
Toast.makeText(GLViewMediaActivity.this, "Saturation adjust!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 7) {
mColorFlag = 1;
Toast.makeText(GLViewMediaActivity.this, "Gray Color!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 1) {
mColorFlag = 2;
Toast.makeText(GLViewMediaActivity.this, "Warm Color!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 2){
mColorFlag = 3;
Toast.makeText(GLViewMediaActivity.this, "Cool Color!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 3){
mColorFlag = 4;
Toast.makeText(GLViewMediaActivity.this, "Amplify!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 4){
mColorFlag = 5;
Toast.makeText(GLViewMediaActivity.this, "Vague!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag == 5){
mColorFlag = 6;
Toast.makeText(GLViewMediaActivity.this, "Beauty!", Toast.LENGTH_SHORT).show();
}else if(mColorFlag ==6){
mColorFlag = 0;
Toast.makeText(GLViewMediaActivity.this, "Orignal Color!", Toast.LENGTH_SHORT).show();
}
}
});

btn_mirror.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// Cycle the mirror mode: none -> X mirror -> Y mirror -> none.
if(xyFlag == 0) {
Toast.makeText(GLViewMediaActivity.this, "X Mirror!", Toast.LENGTH_SHORT).show();
xyFlag = 1;
}else if(xyFlag == 1){
xyFlag = 2;
Toast.makeText(GLViewMediaActivity.this, "Y Mirror!", Toast.LENGTH_SHORT).show();
}else if(xyFlag == 2) {
xyFlag = 0;
Toast.makeText(GLViewMediaActivity.this, "Normal!", Toast.LENGTH_SHORT).show();
}
}
});
}

// Lifecycle pass-throughs: nothing extra is released here.
// NOTE(review): the camera device and capture session are never closed in
// onPause/onDestroy -- confirm whether release is handled elsewhere.
@Override
protected void onPause() {
super.onPause();
}
@Override
protected void onDestroy() {
super.onDestroy();
}

// TextureView surface is ready: open the first camera in the id list and
// kick off the preview (see mCameraDeviceStateCallback).
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
CameraManager cameraManager = (CameraManager) getSystemService(CAMERA_SERVICE);
try {
Log.i(TAG, "onSurfaceTextureAvailable: width = " + width + ", height = " + height);
String[] CameraIdList = cameraManager.getCameraIdList();
CameraCharacteristics characteristics = cameraManager.getCameraCharacteristics(CameraIdList[0]);
characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL); // NOTE(review): result discarded -- dead statement
// Runtime permission check (Android 6.0+): silently bail out if CAMERA
// has not been granted.
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
//startCodec();
cameraManager.openCamera(CameraIdList[0], mCameraDeviceStateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {}

// Returning false tells TextureView not to release the SurfaceTexture itself.
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {}
CameraDevice mCameraDevice; // opened camera device, held so it can be closed on disconnect
private CameraDevice.StateCallback mCameraDeviceStateCallback = new CameraDevice.StateCallback() {
// Camera opened: keep the reference and start the preview session.
@Override
public void onOpened(CameraDevice camera) {
Log.i(TAG, " CameraDevice.StateCallback onOpened ");
try {
mCameraDevice = camera;
startPreview(camera);
} catch (CameraAccessException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}

// Camera disconnected: release the device.
@Override
public void onDisconnected(CameraDevice camera) {
if (null != mCameraDevice) {
mCameraDevice.close();
GLViewMediaActivity.this.mCameraDevice = null;
}
}

@Override
public void onError(CameraDevice camera, int error) {}
};

/**
 * Starts the camera2 preview with two output surfaces: the plain TextureView
 * preview and the GL SurfaceTexture (camera frame -> surface0 ->
 * videoTexture.updateTexImage() -> GL_TEXTURE_EXTERNAL_OES texture).
 *
 * @throws CameraAccessException if the capture request or session cannot be created
 */
private void startPreview(CameraDevice camera) throws CameraAccessException {
    // TextureView -> SurfaceTexture -> Surface: on-screen raw preview target.
    SurfaceTexture texture = mPreviewView.getSurfaceTexture();
    texture.setDefaultBufferSize(mPreviewView.getWidth(), mPreviewView.getHeight());
    Surface surface = new Surface(texture);

    // GL path target, backed by the OES texture created in setupTexture().
    // NOTE(review): videoTexture is created on the GL thread in
    // onSurfaceCreated -- presumably ready before the camera opens; confirm.
    Surface surface0 = new Surface(videoTexture);

    Log.i(TAG, " startPreview ");
    // Fixed: the original wrapped createCaptureRequest in a try/catch that
    // swallowed CameraAccessException and then fell through to use the null
    // mPreviewBuilder (NPE). The method already declares `throws
    // CameraAccessException`, so let the exception propagate to the caller.
    mPreviewBuilder = camera.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);

    // Register both preview outputs: camera frames -> these surfaces.
    mPreviewBuilder.addTarget(surface);
    mPreviewBuilder.addTarget(surface0);
    camera.createCaptureSession(Arrays.asList(surface, surface0), mSessionStateCallback, null);
}

//1. CameraCaptureSession.StateCallback: once configured, set AF/AE and start
// the repeating preview request.
private CameraCaptureSession.StateCallback mSessionStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
Log.i(TAG, " onConfigured ");
//session.capture(mPreviewBuilder.build(), mSessionCaptureCallback, mHandler);
mSession = session;
// continuous auto-focus
mPreviewBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
// auto-exposure with automatic flash
mPreviewBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
//int rotation = getWindowManager().getDefaultDisplay().getRotation();
//mPreviewBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
session.setRepeatingRequest(mPreviewBuilder.build(), null, null); //null
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

@Override
public void onConfigureFailed(CameraCaptureSession session) {}
};

int callback_time; // capture-complete counter, used for debug logging only
//2. CameraCaptureSession.CaptureCallback (only referenced by the commented-out
// session.capture() call above).
private CameraCaptureSession.CaptureCallback mSessionCaptureCallback =new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
//Toast.makeText(GLViewMediaActivity.this, "picture success!", Toast.LENGTH_SHORT).show();
callback_time++;
Log.i(TAG, " CaptureCallback = "+callback_time);
}

// NOTE(review): onCaptureProgressed fires for partial results, not failures --
// the "picture failed!" toast text looks misleading; confirm intent.
@Override
public void onCaptureProgressed(CameraCaptureSession session, CaptureRequest request, CaptureResult partialResult) {
Toast.makeText(GLViewMediaActivity.this, "picture failed!", Toast.LENGTH_SHORT).show();
}
};

/**
 * Loads a drawable resource into a new GL_TEXTURE_2D texture and returns its id.
 *
 * @param drawableId resource id of the image to load
 * @return the GL texture id
 * @throws RuntimeException if the resource cannot be decoded
 */
public int initTexture(int drawableId)
{
    // Generate one texture id.
    int[] textures = new int[1];
    GLES20.glGenTextures(1, textures, 0);
    int textureId = textures[0];
    Log.i(TAG, " initTexture textureId = " + textureId);

    // Bind it and configure filtering/wrapping.
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER,GLES20.GL_NEAREST);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D,GLES20.GL_TEXTURE_MAG_FILTER,GLES20.GL_LINEAR);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S,GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T,GLES20.GL_CLAMP_TO_EDGE);

    // Decode the image; the stream is closed on every path.
    InputStream is = this.getResources().openRawResource(drawableId);
    Bitmap bitmapTmp;
    try {
        bitmapTmp = BitmapFactory.decodeStream(is);
    } finally {
        try {
            is.close();
        }
        catch(IOException e) {
            e.printStackTrace();
        }
    }
    // Fixed: a failed decode used to surface as an NPE inside
    // GLUtils.texImage2D; fail explicitly and release the texture id.
    if (bitmapTmp == null) {
        GLES20.glDeleteTextures(1, textures, 0);
        throw new RuntimeException("Error decoding resource " + drawableId);
    }

    // Upload the bitmap into the bound texture (level 0, no border), then
    // recycle it -- the pixel data now lives in GL.
    GLUtils.texImage2D(GLES20.GL_TEXTURE_2D, 0, bitmapTmp, 0);
    bitmapTmp.recycle();
    return textureId;
}

int textureIdOne; // GL id of the still-image texture loaded from R.drawable.bg
int textureHandle; // uniform handle of the sampler2D "texture0"

// GL context created: build the program/buffers, create the camera OES
// texture, and load the still overlay texture.
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
setupGraphics();
setupVertexBuffer();
setupTexture();
textureIdOne= initTexture(R.drawable.bg); // load the still (overlay) texture
}

// Surface resized: update the viewport and rebuild the mirror/projection
// matrices for the new aspect ratio.
@Override
public void onSurfaceChanged(GL10 gl, int width, int height) {
this.width = width;
this.height = height;
GLES20.glViewport(0, 0, width, height);
setSize(width, height); // rebuild model/view/projection matrices from the new size
}

// Per-frame render: latch the newest camera frame (if any) into the OES
// texture, clear, and draw the filtered quad.
@Override
public void onDrawFrame(GL10 gl) {
synchronized (this) {
if (frameAvailable) {
videoTexture.updateTexImage(); // update the SurfaceTexture image so the bound GL_TEXTURE_EXTERNAL_OES texture can be rendered
videoTexture.getTransformMatrix(videoTextureTransform); // fetch its texture transform matrix
frameAvailable = false;
}
}
GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f); // clear color
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
//GL_COLOR_BUFFER_BIT clears the color buffer
//GL_DEPTH_BUFFER_BIT resets all depth values to the maximum (far plane)
drawTexture();
}

// Called from the camera's thread when a new frame arrives on the
// SurfaceTexture; just flags it so onDrawFrame consumes it on the GL thread.
@Override
public void onFrameAvailable(SurfaceTexture surfaceTexture) {
synchronized (this) {
frameAvailable = true;
}
}

private float[] mViewMatrix=new float[16]; // view matrix
private float[] mProjectMatrix=new float[16]; // projection matrix
private float[] mModelMatrix=new float[16]; // model matrix (X-mirror source)
private float[] mModelMatrix0=new float[16]; // model matrix (Y-mirror source)
private float[] matrix=new float[16]; // uploaded as uniform uProjMatrix
private float[] matrix0=new float[16]; // uploaded as uniform uProjMatrix0
private int gHWidth; // uniform handle "mWidth" (see setupGraphics note)
private int gHHeight; // uniform handle "mHeight"

// Builds the matrices uploaded as uProjMatrix / uProjMatrix0 from the surface size.
public void setSize(int width,int height){
float ratio=(float)width/height; // surface aspect ratio
// projection matrix -- window display
Matrix.frustumM(mProjectMatrix, 0, -ratio, ratio, -1, 1, 1, 3);
// view matrix -- eye position / look-at target / up vector
Matrix.setLookAtM(mViewMatrix, 0,
0.0f, 0.0f, 1.0f, //0f, 0.0f, 0.0f,
0.0f, 0.0f, 0.0f, //0.0f, 0.0f,-1.0f,
0f, 1.0f, 0.0f);
// model matrices -- the object's own pose
Matrix.setIdentityM(mModelMatrix,0);
Matrix.setIdentityM(mModelMatrix0,0);
//Matrix.scaleM(mModelMatrix,0,2,2,2);

Matrix.multiplyMM(matrix,0,mProjectMatrix,0,mViewMatrix,0); // matrix multiply
Matrix.multiplyMM(matrix0,0,mProjectMatrix,0,mViewMatrix,0);

// NOTE(review): the two assignments below discard the projection*view
// products computed above -- uProjMatrix/uProjMatrix0 end up as plain mirror
// (negative-scale identity) matrices. The mirror effect works this way, but
// the multiplyMM calls are dead stores; confirm intent before relying on the
// frustum/look-at setup here.
matrix = flip(mModelMatrix, true, false);
matrix0 = flip(mModelMatrix0, false, true);
}

// Rotates matrix m by `angle` degrees around the Z axis (in place) and returns it.
public static float[] rotate(float[] m,float angle){
Matrix.rotateM(m,0,angle,0,0,1);
return m;
}

// Mirror: negates the X and/or Y scale of m (in place) and returns it.
public float[] flip(float[] m,boolean x,boolean y){
if(x||y){
Matrix.scaleM(m,0,x?-1:1,y?-1:1,1);
}
return m;
}

// Compiles and links the shader program from raw resources and caches the
// attribute/uniform handles used by drawTexture().
private void setupGraphics() {
final String vertexShader = HelpUtils.readTextFileFromRawResource(context, R.raw.vetext_sharder);
final String fragmentShader = HelpUtils.readTextFileFromRawResource(context, R.raw.fragment_sharder);

final int vertexShaderHandle = HelpUtils.compileShader(GLES20.GL_VERTEX_SHADER, vertexShader);
final int fragmentShaderHandle = HelpUtils.compileShader(GLES20.GL_FRAGMENT_SHADER, fragmentShader);
shaderProgram = HelpUtils.createAndLinkProgram(vertexShaderHandle, fragmentShaderHandle,
new String[]{"texture", "vPosition", "vTexCoordinate", "textureTransform"});

GLES20.glUseProgram(shaderProgram);
textureParamHandle = GLES20.glGetUniformLocation(shaderProgram, "texture"); // camera OES texture sampler
textureCoordinateHandle = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinate"); // vertex texture coordinate
positionHandle = GLES20.glGetAttribLocation(shaderProgram, "vPosition"); // vertex position
textureTranformHandle = GLES20.glGetUniformLocation(shaderProgram, "textureTransform");

textureHandle = GLES20.glGetUniformLocation(shaderProgram, "texture0"); // still-image sampler handle
mRatio = GLES20.glGetUniformLocation(shaderProgram, "mratio"); // blend factor
// NOTE(review): mWidth/mHeight are declared const in the fragment shader, so
// these lookups return -1 and the glUniform1i calls below are silently
// ignored; width/height are also still 0 here (onSurfaceChanged runs later).
// Confirm intent.
gHWidth=GLES20.glGetUniformLocation(shaderProgram,"mWidth"); // window width / height
gHHeight=GLES20.glGetUniformLocation(shaderProgram,"mHeight");

GLES20.glUniform1i(gHWidth,width);
GLES20.glUniform1i(gHHeight,height);
}

// Allocates the direct NIO buffers for the quad: index (draw-order) buffer,
// vertex-position buffer, and texture-coordinate buffer. Each is backed by a
// native-order direct ByteBuffer and rewound so drawing starts at element 0.
private void setupVertexBuffer() {
    // Index buffer: 2 bytes per short.
    ByteBuffer indexBytes = ByteBuffer.allocateDirect(drawOrder.length * 2);
    indexBytes.order(ByteOrder.nativeOrder());
    drawListBuffer = indexBytes.asShortBuffer();
    drawListBuffer.put(drawOrder);
    drawListBuffer.position(0);

    // Vertex-position buffer: 4 bytes per float.
    ByteBuffer positionBytes = ByteBuffer.allocateDirect(squareCoords.length * 4);
    positionBytes.order(ByteOrder.nativeOrder());
    vertexBuffer = positionBytes.asFloatBuffer();
    vertexBuffer.put(squareCoords);
    vertexBuffer.position(0);

    // Texture-coordinate buffer: 4 bytes per float.
    ByteBuffer texCoordBytes = ByteBuffer.allocateDirect(textureCoords.length * 4);
    texCoordBytes.order(ByteOrder.nativeOrder());
    textureBuffer = texCoordBytes.asFloatBuffer();
    textureBuffer.put(textureCoords);
    textureBuffer.position(0);
}

// Creates the external-OES texture that receives camera frames and wraps it
// in a SurfaceTexture whose frame callbacks drive rendering.
private void setupTexture() {
// Generate the actual texture
GLES20.glActiveTexture(GLES20.GL_TEXTURE0); // activate texture unit 0
GLES20.glGenTextures(1, textures, 0); // generate a texture id
checkGlError("Texture generate");

GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]);
// Bind the id to the external-OES target (other targets include GL_TEXTURE_1D,
// GL_TEXTURE_2D, GL_TEXTURE_3D, GL_TEXTURE_EXTERNAL_OES, ...).
checkGlError("Texture bind");

videoTexture = new SurfaceTexture(textures[0]); // SurfaceTexture backed by the OES texture id
videoTexture.setOnFrameAvailableListener(this);
}

private void drawTexture() {
// Upload the two mirror/projection matrices the vertex shader can select between.
// NOTE(review): glGetUniformLocation is called here on every frame; uniform
// locations are fixed after linking and could be cached once — confirm before changing.
int mHProjMatrix=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix");
GLES20.glUniformMatrix4fv(mHProjMatrix,1,false,matrix,0);

int mHProjMatrix0=GLES20.glGetUniformLocation(shaderProgram,"uProjMatrix0");
GLES20.glUniformMatrix4fv(mHProjMatrix0,1,false,matrix0,0);

int mXyFlag = GLES20.glGetUniformLocation(shaderProgram, "xyFlag"); // mirror mode selector: the vertex shader left-multiplies vPosition by the matching matrix, e.g. uProjMatrix*vPosition
GLES20.glUniform1i(mXyFlag, xyFlag);

int mColorFlagHandle = GLES20.glGetUniformLocation(shaderProgram, "colorFlag"); // filter (texture-operation) type: saturation/grayscale/warm-cool/magnifier/blur/beauty/texture blend
GLES20.glUniform1i(mColorFlagHandle, mColorFlag);

// Vertex attributes generally include position, color, normal and texture coordinates.
GLES20.glEnableVertexAttribArray(positionHandle); // enable the vertex-position attribute array
GLES20.glVertexAttribPointer(positionHandle, 2, GLES20.GL_FLOAT, false, 0, vertexBuffer); // bind position data: 2 floats per vertex, tightly packed

GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, textures[0]); // camera image texture on unit 0
GLES20.glUniform1i(textureParamHandle, 0);

GLES20.glActiveTexture(GLES20.GL_TEXTURE1);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureIdOne); // overlay (blend source) image texture on unit 1
GLES20.glUniform1i(textureHandle, 1);

GLES20.glEnableVertexAttribArray(textureCoordinateHandle);
GLES20.glVertexAttribPointer(textureCoordinateHandle, 4, GLES20.GL_FLOAT, false, 0, textureBuffer); // 4 components per coordinate — the vertex shader declares vTexCoordinate as vec4

GLES20.glUniformMatrix4fv(textureTranformHandle, 1, false, videoTextureTransform, 0); // transform matrix for the GL_TEXTURE_EXTERNAL_OES texture — presumably obtained from SurfaceTexture.getTransformMatrix(); verify against caller
GLES20.glUniform1f(mRatio, ratio); // texture blend factor

GLES20.glDrawElements(GLES20.GL_TRIANGLE_STRIP, drawOrder.length, GLES20.GL_UNSIGNED_SHORT, drawListBuffer); // draw the quad using the vertex index buffer
GLES20.glDisableVertexAttribArray(positionHandle);
GLES20.glDisableVertexAttribArray(textureCoordinateHandle);
}

public void checkGlError(String op) {
// Drain and log every pending GL error, tagged with the operation name.
for (int err = GLES20.glGetError(); err != GLES20.GL_NO_ERROR; err = GLES20.glGetError()) {
Log.e("SurfaceTest", op + ": glError " + GLUtils.getEGLErrorString(err));
}
}

}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
demo效果:
紋理融合

放大鏡;

 

模糊:

暖色:

冷色:

美顏:

映象:

---------------------
作者:keen_zuxwang
來源:CSDN
原文:https://blog.csdn.net/keen_zuxwang/article/details/78363464
版權宣告:本文為博主原創文章,轉載請附上博文連結!