Android上使用OpenGLES2.0显示YUV数据

论坛 期权论坛 脚本     
匿名网站用户   2020-12-19 17:01   11   0

转自:http://blog.csdn.net/ueryueryuery/article/details/17608185


在Android上用OpenGLES来显示YUV图像,之所以这样做,是因为:

1.Android本身也不能直接显示YUV图像,YUV转成RGB还是必要的;

2.YUV手动转RGB会占用大量的CPU资源,如果以这样的形式播放视频,手机会很热,所以我们尽量让GPU来做这件事;

3.OpenGLES是Android集成到自身框架里的第三方库,它有很多的可取之处。


博主的C/C++不是很好,所以整个过程是在Java层实现的,大家见笑,我主要参考(但不限于)以下文章,十分感谢这些朋友的分享:

1. http://blog.csdn.net/xiaoguaihai/article/details/8672631

2.http://chenshun87.blog.163.com/blog/static/18859389201232011727615/

3.http://blog.csdn.net/ypist/article/details/8950903

4.http://blog.csdn.net/wanglang3081/article/details/8480281

5.http://blog.csdn.net/xdljf/article/details/7178620


一、首先我先说一下这个解决方案是怎么运行的,给大家一个概念

1.显示在哪 -> GLSurfaceView

2.谁来把数据贴到GLSurfaceView上 -> Renderer

3.谁来负责YUV数据转换成RGB -> GL中的Program/Shader

一句话说明白就是:GL的Program/Shader把用户传过来的YUV数据,转换成RGB数据后,通过Renderer贴在GLSurfaceView上。


二、怎么检查你的手机是不是支持GLES2.0呢,使用下面的代码段就行了:

  1. public static boolean detectOpenGLES20(Context context) {
  2. ActivityManager am = (ActivityManager) context.getSystemService(Context.ACTIVITY_SERVICE);
  3. ConfigurationInfo info = am.getDeviceConfigurationInfo();
  4. return (info.reqGlEsVersion >= 0x20000);
  5. }
一般的手机,都是会支持GLES2.0的,大家不必担心。


三、开搞


A 先要有一个GLSurfaceView,把它放入你的布局中就好了。

找到这个家伙,对它进行简单的设置,并为它设置一个Renderer。

Renderer的作用就是在GLSurfaceView上画出图像。

// Wire up the GLSurfaceView from the layout and attach the renderer that will
// draw the decoded YUV frames.
mGLSurface = (GLFrameSurface) findViewById(R.id.glsurface);
// Request a GLES 2.0 context; per the GLSurfaceView contract this must be
// called before setRenderer().
mGLSurface.setEGLContextClientVersion(2);
mGLFRenderer = new GLFrameRenderer(this, mGLSurface);
mGLSurface.setRenderer(mGLFRenderer);

B 再就是看下GLFrameRenderer怎么来写了

  1. public class GLFrameRenderer implements Renderer {
  2. private ISimplePlayer mParentAct; //请无视之
  3. private GLSurfaceView mTargetSurface;
  4. private GLProgram prog = new GLProgram(0);
  5. private int mVideoWidth = -1, mVideoHeight = -1;
  6. private ByteBuffer y;
  7. private ByteBuffer u;
  8. private ByteBuffer v;
  9. public GLFrameRenderer(ISimplePlayer callback, GLSurfaceView surface) {
  10. mParentAct = callback; //请无视之
  11. mTargetSurface = surface;
  12. }
  13. @Override
  14. public void onSurfaceCreated(GL10 gl, EGLConfig config) {
  15. Utils.LOGD("GLFrameRenderer :: onSurfaceCreated");
  16. if (!prog.isProgramBuilt()) {
  17. prog.buildProgram();
  18. Utils.LOGD("GLFrameRenderer :: buildProgram done");
  19. }
  20. }
  21. @Override
  22. public void onSurfaceChanged(GL10 gl, int width, int height) {
  23. Utils.LOGD("GLFrameRenderer :: onSurfaceChanged");
  24. GLES20.glViewport(0, 0, width, height);
  25. }
  26. @Override
  27. public void onDrawFrame(GL10 gl) {
  28. synchronized (this) {
  29. if (y != null) {
  30. // reset position, have to be done
  31. y.position(0);
  32. u.position(0);
  33. v.position(0);
  34. prog.buildTextures(y, u, v, mVideoWidth, mVideoHeight);
  35. GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
  36. GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  37. prog.drawFrame();
  38. }
  39. }
  40. }
  41. /**
  42. * this method will be called from native code, it happens when the video is about to play or
  43. * the video size changes.
  44. */
  45. public void update(int w, int h) {
  46. Utils.LOGD("INIT E");
  47. if (w > 0 && h > 0) {
  48. if (w != mVideoWidth && h != mVideoHeight) {
  49. this.mVideoWidth = w;
  50. this.mVideoHeight = h;
  51. int yarraySize = w * h;
  52. int uvarraySize = yarraySize / 4;
  53. synchronized (this) {
  54. y = ByteBuffer.allocate(yarraySize);
  55. u = ByteBuffer.allocate(uvarraySize);
  56. v = ByteBuffer.allocate(uvarraySize);
  57. }
  58. }
  59. }
  60. mParentAct.onPlayStart(); //请无视之
  61. Utils.LOGD("INIT X");
  62. }
  63. /**
  64. * this method will be called from native code, it's used for passing yuv data to me.
  65. */
  66. public void update(byte[] ydata, byte[] udata, byte[] vdata) {
  67. synchronized (this) {
  68. y.clear();
  69. u.clear();
  70. v.clear();
  71. y.put(ydata, 0, ydata.length);
  72. u.put(udata, 0, udata.length);
  73. v.put(vdata, 0, vdata.length);
  74. }
  75. // request to render
  76. mTargetSurface.requestRender();
  77. }
  78. }

代码很简单,Renderer主要处理这么几个事:

1.Surface create的时候,我初始化了一些需要用到的Program/Shader,因为马上就要用到它们了;

2.Surface change的时候,重置一下画面;

3.onDrawFrame()时,把数据真正地“画”上去;

4.至于两个update方法,是用来把图像的宽高/数据传过来的。


C 看GLProgram是怎么写的,它的作用是向Renderer提供计算单元,你所有对数据的处理,都在这儿了。

  1. public boolean isProgramBuilt() {
  2. return isProgBuilt;
  3. }
  4. public void buildProgram() {
  5. createBuffers(_vertices, coordVertices);
  6. if (_program <= 0) {
  7. _program = createProgram(VERTEX_SHADER, FRAGMENT_SHADER);
  8. }
  9. Utils.LOGD("_program = " + _program);
  10. /*
  11. * get handle for "vPosition" and "a_texCoord"
  12. */
  13. _positionHandle = GLES20.glGetAttribLocation(_program, "vPosition");
  14. Utils.LOGD("_positionHandle = " + _positionHandle);
  15. checkGlError("glGetAttribLocation vPosition");
  16. if (_positionHandle == -1) {
  17. throw new RuntimeException("Could not get attribute location for vPosition");
  18. }
  19. _coordHandle = GLES20.glGetAttribLocation(_program, "a_texCoord");
  20. Utils.LOGD("_coordHandle = " + _coordHandle);
  21. checkGlError("glGetAttribLocation a_texCoord");
  22. if (_coordHandle == -1) {
  23. throw new RuntimeException("Could not get attribute location for a_texCoord");
  24. }
  25. /*
  26. * get uniform location for y/u/v, we pass data through these uniforms
  27. */
  28. _yhandle = GLES20.glGetUniformLocation(_program, "tex_y");
  29. Utils.LOGD("_yhandle = " + _yhandle);
  30. checkGlError("glGetUniformLocation tex_y");
  31. if (_yhandle == -1) {
  32. throw new RuntimeException("Could not get uniform location for tex_y");
  33. }
  34. _uhandle = GLES20.glGetUniformLocation(_program, "tex_u");
  35. Utils.LOGD("_uhandle = " + _uhandle);
  36. checkGlError("glGetUniformLocation tex_u");
  37. if (_uhandle == -1) {
  38. throw new RuntimeException("Could not get uniform location for tex_u");
  39. }
  40. _vhandle = GLES20.glGetUniformLocation(_program, "tex_v");
  41. Utils.LOGD("_vhandle = " + _vhandle);
  42. checkGlError("glGetUniformLocation tex_v");
  43. if (_vhandle == -1) {
  44. throw new RuntimeException("Could not get uniform location for tex_v");
  45. }
  46. isProgBuilt = true;
  47. }
  48. /**
  49. * build a set of textures, one for Y, one for U, and one for V.
  50. */
  51. public void buildTextures(Buffer y, Buffer u, Buffer v, int width, int height) {
  52. boolean videoSizeChanged = (width != _video_width || height != _video_height);
  53. if (videoSizeChanged) {
  54. _video_width = width;
  55. _video_height = height;
  56. Utils.LOGD("buildTextures videoSizeChanged: w=" + _video_width + " h=" + _video_height);
  57. }
  58. // building texture for Y data
  59. if (_ytid < 0 || videoSizeChanged) {
  60. if (_ytid >= 0) {
  61. Utils.LOGD("glDeleteTextures Y");
  62. GLES20.glDeleteTextures(1, new int[] { _ytid }, 0);
  63. checkGlError("glDeleteTextures");
  64. }
  65. // GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
  66. int[] textures = new int[1];
  67. GLES20.glGenTextures(1, textures, 0);
  68. checkGlError("glGenTextures");
  69. _ytid = textures[0];
  70. Utils.LOGD("glGenTextures Y = " + _ytid);
  71. }
  72. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid);
  73. checkGlError("glBindTexture");
  74. GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width, _video_height, 0,
  75. GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y);
  76. checkGlError("glTexImage2D");
  77. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
  78. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
  79. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
  80. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
  81. // building texture for U data
  82. if (_utid < 0 || videoSizeChanged) {
  83. if (_utid >= 0) {
  84. Utils.LOGD("glDeleteTextures U");
  85. GLES20.glDeleteTextures(1, new int[] { _utid }, 0);
  86. checkGlError("glDeleteTextures");
  87. }
  88. int[] textures = new int[1];
  89. GLES20.glGenTextures(1, textures, 0);
  90. checkGlError("glGenTextures");
  91. _utid = textures[0];
  92. Utils.LOGD("glGenTextures U = " + _utid);
  93. }
  94. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
  95. GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width / 2, _video_height / 2, 0,
  96. GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u);
  97. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
  98. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
  99. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
  100. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
  101. // building texture for V data
  102. if (_vtid < 0 || videoSizeChanged) {
  103. if (_vtid >= 0) {
  104. Utils.LOGD("glDeleteTextures V");
  105. GLES20.glDeleteTextures(1, new int[] { _vtid }, 0);
  106. checkGlError("glDeleteTextures");
  107. }
  108. int[] textures = new int[1];
  109. GLES20.glGenTextures(1, textures, 0);
  110. checkGlError("glGenTextures");
  111. _vtid = textures[0];
  112. Utils.LOGD("glGenTextures V = " + _vtid);
  113. }
  114. GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
  115. GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, _video_width / 2, _video_height / 2, 0,
  116. GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v);
  117. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
  118. GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
  119. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
  120. GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
  121. }
/**
 * Renders one frame: binds the vertex/texcoord arrays, binds the three plane
 * textures to their texture units, and draws a full-screen quad. The YUV->RGB
 * conversion happens in the fragment shader.
 */
public void drawFrame() {
    GLES20.glUseProgram(_program);
    checkGlError("glUseProgram");
    // 2 floats per vertex, tightly packed (stride = 8 bytes).
    GLES20.glVertexAttribPointer(_positionHandle, 2, GLES20.GL_FLOAT, false, 8, _vertice_buffer);
    checkGlError("glVertexAttribPointer mPositionHandle");
    GLES20.glEnableVertexAttribArray(_positionHandle);
    GLES20.glVertexAttribPointer(_coordHandle, 2, GLES20.GL_FLOAT, false, 8, _coord_buffer);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(_coordHandle);
    // Bind each plane texture to its own unit and point the sampler uniforms
    // at the matching unit index.
    GLES20.glActiveTexture(_textureI);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _ytid);
    GLES20.glUniform1i(_yhandle, _tIindex);
    GLES20.glActiveTexture(_textureII);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _utid);
    GLES20.glUniform1i(_uhandle, _tIIindex);
    GLES20.glActiveTexture(_textureIII);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, _vtid);
    GLES20.glUniform1i(_vhandle, _tIIIindex);
    // 4 vertices as a triangle strip -> full-screen quad.
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    // NOTE(review): glFinish() stalls the CPU until the GPU completes every
    // frame — presumably intentional pacing, but confirm before removing.
    GLES20.glFinish();
    GLES20.glDisableVertexAttribArray(_positionHandle);
    GLES20.glDisableVertexAttribArray(_coordHandle);
}
  150. /**
  151. * create program and load shaders, fragment shader is very important.
  152. */
  153. public int createProgram(String vertexSource, String fragmentSource) {
  154. // create shaders
  155. int vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, vertexSource);
  156. int pixelShader = loadShader(GLES20.GL_FRAGMENT_SHADER, fragmentSource);
  157. // just check
  158. Utils.LOGD("vertexShader = " + vertexShader);
  159. Utils.LOGD("pixelShader = " + pixelShader);
  160. int program = GLES20.glCreateProgram();
  161. if (program != 0) {
  162. GLES20.glAttachShader(program, vertexShader);
  163. checkGlError("glAttachShader");
  164. GLES20.glAttachShader(program, pixelShader);
  165. checkGlError("glAttachShader");
  166. GLES20.glLinkProgram(program);
  167. int[] linkStatus = new int[1];
  168. GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, linkStatus, 0);
  169. if (linkStatus[0] != GLES20.GL_TRUE) {
  170. Utils.LOGE("Could not link program: ", null);
  171. Utils.LOGE(GLES20.glGetProgramInfoLog(program), null);
  172. GLES20.glDeleteProgram(program);
  173. program = 0;
  174. }
  175. }
  176. return program;
  177. }
  178. /**
  179. * create shader with given source.
  180. */
  181. private int loadShader(int shaderType, String source) {
  182. int shader = GLES20.glCreateShader(shaderType);
  183. if (shader != 0) {
  184. GLES20.glShaderSource(shader, source);
  185. GLES20.glCompileShader(shader);
  186. int[] compiled = new int[1];
  187. GLES20.glGetShaderiv(shader, GLES20.GL_COMPILE_STATUS, compiled, 0);
  188. if (compiled[0] == 0) {
  189. Utils.LOGE("Could not compile shader " + shaderType + ":", null);
  190. Utils.LOGE(GLES20.glGetShaderInfoLog(shader), null);
  191. GLES20.glDeleteShader(shader);
  192. shader = 0;
  193. }
  194. }
  195. return shader;
  196. }
  197. /**
  198. * these two buffers are used for holding vertices, screen vertices and texture vertices.
  199. */
  200. private void createBuffers(float[] vert, float[] coord) {
  201. _vertice_buffer = ByteBuffer.allocateDirect(vert.length * 4);
  202. _vertice_buffer.order(ByteOrder.nativeOrder());
  203. _vertice_buffer.asFloatBuffer().put(vert);
  204. _vertice_buffer.position(0);
  205. if (_coord_buffer == null) {
  206. _coord_buffer = ByteBuffer.allocateDirect(coord.length * 4);
  207. _coord_buffer.order(ByteOrder.nativeOrder());
  208. _coord_buffer.asFloatBuffer().put(coord);
  209. _coord_buffer.position(0);
  210. }
  211. }
  212. private void checkGlError(String op) {
  213. int error;
  214. while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
  215. Utils.LOGE("***** " + op + ": glError " + error, null);
  216. throw new RuntimeException(op + ": glError " + error);
  217. }
  218. }
// Full-screen quad in normalized device coordinates, in triangle-strip order.
private static float[] squareVertices = { -1.0f, -1.0f, 1.0f, -1.0f, -1.0f, 1.0f, 1.0f, 1.0f, }; // fullscreen
// Texture coordinates covering the whole texture, flipped vertically relative
// to the quad so the image is not drawn upside-down.
private static float[] coordVertices = { 0.0f, 1.0f, 1.0f, 1.0f, 0.0f, 0.0f, 1.0f, 0.0f, };// whole-texture
// NOTE(review): buildProgram() passes `_vertices` to createBuffers(), but the
// only vertex array declared in this chunk is `squareVertices` — verify that
// `_vertices` exists elsewhere or that one of the two names needs fixing.
// Pass-through vertex shader: forwards the position unchanged and hands the
// texture coordinate to the fragment shader via varying `tc`.
private static final String VERTEX_SHADER = "attribute vec4 vPosition;\n" + "attribute vec2 a_texCoord;\n"
+ "varying vec2 tc;\n" + "void main() {\n" + "gl_Position = vPosition;\n" + "tc = a_texCoord;\n" + "}\n";
// Fragment shader: samples the Y/U/V plane textures and converts to RGB.
// The constants (1.164, 1.596, -0.813, -0.392, 2.017 with 16/128 offsets)
// match the common BT.601 video-range YUV->RGB transform.
private static final String FRAGMENT_SHADER = "precision mediump float;\n" + "uniform sampler2D tex_y;\n"
+ "uniform sampler2D tex_u;\n" + "uniform sampler2D tex_v;\n" + "varying vec2 tc;\n" + "void main() {\n"
+ "vec4 c = vec4((texture2D(tex_y, tc).r - 16./255.) * 1.164);\n"
+ "vec4 U = vec4(texture2D(tex_u, tc).r - 128./255.);\n"
+ "vec4 V = vec4(texture2D(tex_v, tc).r - 128./255.);\n" + "c += V * vec4(1.596, -0.813, 0, 0);\n"
+ "c += U * vec4(0, -0.392, 2.017, 0);\n" + "c.a = 1.0;\n" + "gl_FragColor = c;\n" + "}\n";

这里面代码比较复杂,我在这里稍作解释:

1.首先,buildProgram()目的要生成一个program,作用是用来将YUV->RGB,其中用到了2个shader(shader就相当于一个小运算器,它运行一段代码),第1个shader运行VERTEX_SHADER里的代码,目的是将坐标作为参数传入第2个shader;第2个shader来做YUV->RGB的运算。

2.buildTextures()是要生成3个贴图,分别用来存放Y/U/V三个平面的数据,三个贴图经过shader合成之后,显示出来的就是彩色的图片。

3.drawFrame()是使用program来做运算,并真正去做画这个动作了。

分享到 :
0 人收藏
您需要登录后才可以回帖 登录 | 立即注册

本版积分规则

积分:1136255
帖子:227251
精华:0
期权论坛 期权论坛
发布
内容

下载期权论坛手机APP