I am using ffmpeg in an RTSP player on Android to connect to and decode a video stream. I want to use OpenGL ES 2.0 to convert the YUV frames to RGB and display them, but I am stuck (this is my first time using OpenGL).
I will try to explain my problem.
From the Android NDK I initialize an OpenGL context (from the thread I want to use to display the images) with this method:
EGLint attribs[] = {
    EGL_SURFACE_TYPE, EGL_WINDOW_BIT,
    EGL_RENDERABLE_TYPE, EGL_OPENGL_ES2_BIT,
    EGL_BLUE_SIZE, 8,
    EGL_GREEN_SIZE, 8,
    EGL_RED_SIZE, 8,
    EGL_ALPHA_SIZE, 8,
    EGL_NONE
};
EGLint contextAttrs[] = { EGL_CONTEXT_CLIENT_VERSION, 2, EGL_NONE };

LOGI("Initializing context");

if((display = eglGetDisplay(EGL_DEFAULT_DISPLAY)) == EGL_NO_DISPLAY) {
    closeContext();
    return;
}
if(!eglInitialize(display, 0, 0)) {
    closeContext();
    return;
}
if(!eglChooseConfig(display, attribs, &config, 1, &numConfigs)) {
    closeContext();
    return;
}
if(!eglGetConfigAttrib(display, config, EGL_NATIVE_VISUAL_ID, &format)) {
    closeContext();
    return;
}

ANativeWindow_setBuffersGeometry(window, 0, 0, format);

if(!(surface = eglCreateWindowSurface(display, config, window, 0))) {
    closeContext();
    return;
}
if(!(context = eglCreateContext(display, config, 0, contextAttrs))) {
    closeContext();
    return;
}
if(!eglMakeCurrent(display, surface, surface, context)) {
    closeContext();
    return;
}
if(!eglQuerySurface(display, surface, EGL_WIDTH, &width) ||
   !eglQuerySurface(display, surface, EGL_HEIGHT, &height)) {
    closeContext();
    return;
}

LOGI("EGL WIDTH : %d EGL HEIGHT : %d ", (int)width, (int)height);

isInitEGLContext = 1;
Then I set up the graphics with this method:
// Load the vertex and fragment shaders, attach the shaders and link the program
programId = createProgram(kVertexShader, kFragmentShader);
LOGI("Program id : %d error : %d", (int)programId, glGetError());
if(!programId) {
    LOGI("Could not create program");
    return;
}

// Get the index of the generic vertex attribute bound to vPosition
positionObject = (int) glGetAttribLocation(programId, "vPosition");
// Get the index of the generic vertex attribute bound to vTexCoord
texturePosition = (int) glGetAttribLocation(programId, "vTexCoord");

// Get the location of yTexture within the program (corresponding to programId)
yuv_texture_object[0] = glGetUniformLocation(programId, "yTexture");
// Get the location of uTexture within the program
yuv_texture_object[1] = glGetUniformLocation(programId, "uTexture");
// Get the location of vTexture within the program
yuv_texture_object[2] = glGetUniformLocation(programId, "vTexture");

// Set up the width of each plane (display size)
stream_yuv_width[0] = 800;
stream_yuv_width[1] = 400;
stream_yuv_width[2] = 400;

// Set up the height of each plane (display size)
stream_yuv_height[0] = 600;
stream_yuv_height[1] = 300;
stream_yuv_height[2] = 300;

// Set the viewport
glViewport(0, 0, stream_yuv_width[0], stream_yuv_height[0]);
LOGI("glViewPort() %d ", glGetError());
I have hardcoded the display sizes (for now), until I get something working.
The createProgram method loads the shaders, creates the program, and compiles and links the shaders successfully.
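My createProgram is essentially the usual compile-and-link boilerplate, roughly like this (a simplified sketch rather than my exact code; the loadShader helper name is just for illustration):

#include <GLES2/gl2.h>
// LOGI is the same Android log macro used elsewhere in this question.

static GLuint loadShader(GLenum type, const char* source) {
    GLuint shader = glCreateShader(type);
    if(!shader) return 0;
    glShaderSource(shader, 1, &source, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if(!compiled) {
        char log[512];
        glGetShaderInfoLog(shader, sizeof(log), NULL, log);
        LOGI("Shader compile error: %s", log);
        glDeleteShader(shader);
        return 0;
    }
    return shader;
}

static GLuint createProgram(const char* vertexSource, const char* fragmentSource) {
    GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vertexSource);
    GLuint fragmentShader = loadShader(GL_FRAGMENT_SHADER, fragmentSource);
    if(!vertexShader || !fragmentShader) return 0;

    GLuint program = glCreateProgram();
    glAttachShader(program, vertexShader);
    glAttachShader(program, fragmentShader);
    glLinkProgram(program);

    GLint linked = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if(!linked) {
        char log[512];
        glGetProgramInfoLog(program, sizeof(log), NULL, log);
        LOGI("Program link error: %s", log);
        glDeleteProgram(program);
        return 0;
    }
    return program;
}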
Here are my shaders:
const char kVertexShader[] =
    "attribute vec4 vPosition;\n"
    "attribute vec2 vTexCoord;\n"
    "varying vec2 v_vTexCoord;\n"
    "void main() {\n"
    "  gl_Position = vPosition;\n"
    "  v_vTexCoord = vTexCoord;\n"
    "}\n";

const char kFragmentShader[] =
    "precision mediump float;\n"
    "varying vec2 v_vTexCoord;\n"
    "uniform sampler2D yTexture;\n"
    "uniform sampler2D uTexture;\n"
    "uniform sampler2D vTexture;\n"
    "void main() {\n"
    "  float nx, ny;\n"
    "  nx = v_vTexCoord.x;\n"
    "  ny = v_vTexCoord.y;\n"
    "  float y = texture2D(yTexture, v_vTexCoord).r;\n"
    "  float u = texture2D(uTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
    "  float v = texture2D(vTexture, vec2(nx / 2.0, ny / 2.0)).r;\n"
    "  y = 1.1643 * (y - 0.0625);\n"
    "  u = u - 0.5;\n"
    "  v = v - 0.5;\n"
    "  float r = y + 1.5958 * v;\n"
    "  float g = y - 0.39173 * u - 0.81290 * v;\n"
    "  float b = y + 2.017 * u;\n"
    "  gl_FragColor = vec4(r, g, b, 1.0);\n"
    "}\n";

const GLfloat kVertexInformation[] = {
    -1.0f,  1.0f,   // TexCoord 0 top left
    -1.0f, -1.0f,   // TexCoord 1 bottom left
     1.0f, -1.0f,   // TexCoord 2 bottom right
     1.0f,  1.0f    // TexCoord 3 top right
};

const GLshort kTextureCoordinateInformation[] = {
    0, 0,   // TexCoord 0 top left
    0, 1,   // TexCoord 1 bottom left
    1, 1,   // TexCoord 2 bottom right
    1, 0    // TexCoord 3 top right
};

const GLuint kStride = 0; // COORDS_PER_VERTEX * 4;

const GLshort kIndicesInformation[] = {
    0, 1, 2,
    0, 2, 3
};
Then I set up the YUV textures and the render-to-texture; at this point yuv_width[i] and yuv_height[i] are set to the correct values:
void setupYUVTexture()
{
    // Set up the pixel alignment
    glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
    LOGI("glPixelStorei() : %d ", glGetError());

    int i = 0;
    for(i = 0; i < 3; ++i) {
        // Check if the texture is already set up
        if(yuv_texture_id[i] != 0) {
            glDeleteTextures(1, &yuv_texture_id[i]);
            yuv_texture_id[i] = 0;
        }

        // Activate texture unit i
        glActiveTexture(GL_TEXTURE0 + i);
        // Generate the texture name
        glGenTextures(1, &yuv_texture_id[i]);
        // Bind the texture
        glBindTexture(GL_TEXTURE_2D, yuv_texture_id[i]);

        // Set up the texture parameters
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);

        // Define the texture image
        glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, yuv_width[i], yuv_height[i], 0,
                     GL_LUMINANCE, GL_UNSIGNED_BYTE, NULL);
        LOGI("glTexImage2D() %d ", glGetError());
    }
}

void renderToTexture()
{
    // Generate the framebuffer object name
    glGenFramebuffers(1, &frameBufferObject);
    // Bind the framebuffer
    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);

    // Generate the renderbuffer object name
    glGenRenderbuffers(1, &renderBufferObject);
    // Bind the renderbuffer
    glBindRenderbuffer(GL_RENDERBUFFER, renderBufferObject);
    // Create and initialize the renderbuffer storage (RGBA, same size as the viewport)
    glRenderbufferStorage(GL_RENDERBUFFER, GL_RGBA4, 800, 600);
    // Attach the renderbuffer to the framebuffer object
    glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, renderBufferObject);

    // Attach the y plane to the framebuffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[0], 0);
    // Attach the u plane to the framebuffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[1], 0);
    // Attach the v plane to the framebuffer object
    glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, yuv_texture_id[2], 0);

    // Bind the default framebuffer again
    glBindFramebuffer(GL_FRAMEBUFFER, 0);

    // Check if the framebuffer is correctly set up
    GLint status = glCheckFramebufferStatus(GL_FRAMEBUFFER);
    if(status != GL_FRAMEBUFFER_COMPLETE) {
        LOGI(" FBO setting fault : %d ", status);
        return;
    }
}
To finish, here is my drawFrame method:
void drawFrame()
{
    LOGI("DrawFrame");

    glBindFramebuffer(GL_FRAMEBUFFER, frameBufferObject);
    printGLError("glBindFramebuffer");

    glUseProgram(programId);
    printGLError("glUseProgram");

    int i = 0;
    for(i = 0; i < 3; ++i) {
        glActiveTexture(GL_TEXTURE0 + i);
        printGLError("glActiveTexture");

        glBindTexture(GL_TEXTURE_2D, yuv_texture_object[i]);
        printGLError("glBindTexture");

        glUniform1i(yuv_texture_object[i], i);
        printGLError("glUniform1i");

        LOGI("Plan : %d Largeur : %d Hauteur : %d ", i, yuv_width[i], yuv_height[i]);

        glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, yuv_width[i], yuv_height[i],
                        GL_LUMINANCE, GL_UNSIGNED_BYTE, yuv_planes[i]);
        printGLError("glTexSubImage2D");
    }

    glVertexAttribPointer(positionObject, 2, GL_FLOAT, GL_FALSE, kStride, kVertexInformation);
    printGLError("glVertexAttribPointer");
    glVertexAttribPointer(texturePosition, 2, GL_SHORT, GL_FALSE, kStride, kTextureCoordinateInformation);
    printGLError("glVertexAttribPointer");

    glEnableVertexAttribArray(positionObject);
    printGLError("glEnableVertexAttribArray");
    glEnableVertexAttribArray(texturePosition);
    printGLError("glEnableVertexAttribArray");

    glBindFramebuffer(GL_FRAMEBUFFER, 0);
    printGLError("glBindFramebuffer");

    glDrawElements(GL_TRIANGLE_STRIP, 6, GL_UNSIGNED_SHORT, kIndicesInformation);
    printGLError("glDrawElements");

    eglSwapBuffers(display, surface);
    printGLError("eglSwapBuffers");
}
Once the OpenGL textures and the other required attributes are initialized, then each time a frame is decoded I remap the y buffer into yuv_planes[0], the u buffer into yuv_planes[1] and the v buffer into yuv_planes[2].
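In other words, the remapping is just a per-plane copy from the decoded AVFrame into contiguous buffers, roughly like this (a simplified sketch, assuming the stream is YUV420P; the helper names and the extern declarations are only for illustration):

#include <libavutil/frame.h>
#include <stdint.h>
#include <string.h>

extern uint8_t* yuv_planes[3];                 // assumed type, matching the buffers used in drawFrame()
extern int      yuv_width[3], yuv_height[3];   // plane sizes set before setupYUVTexture()

// Copy one plane row by row, because AVFrame rows are padded to linesize.
static void copyPlane(uint8_t* dst, int dstWidth, int dstHeight,
                      const uint8_t* src, int srcLinesize)
{
    int y;
    for(y = 0; y < dstHeight; ++y) {
        memcpy(dst + y * dstWidth, src + y * srcLinesize, dstWidth);
    }
}

static void mapFrameToPlanes(const AVFrame* frame)
{
    copyPlane(yuv_planes[0], yuv_width[0], yuv_height[0], frame->data[0], frame->linesize[0]); // Y
    copyPlane(yuv_planes[1], yuv_width[1], yuv_height[1], frame->data[1], frame->linesize[1]); // U
    copyPlane(yuv_planes[2], yuv_width[2], yuv_height[2], frame->data[2], frame->linesize[2]); // V
}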
Once a frame has been correctly decoded with ffmpeg, I call the following, in this order:
– initContext()
– setupGraphics()
– setupYUVTexture()
– renderToTexture()
Then I call drawFrame(). Of course, once everything has been initialized, I call drawFrame() directly after each decoded frame, roughly as sketched below.
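So the per-frame flow I have in mind is roughly the following (ordering sketch only; onFrameDecoded and isInitialized are placeholder names, not part of my actual code):

static int isInitialized = 0;

void onFrameDecoded(void)
{
    if(!isInitialized) {
        initContext();       // EGL display/surface/context on the render thread
        setupGraphics();     // program, attribute and uniform locations, viewport
        setupYUVTexture();   // the three GL_LUMINANCE textures
        renderToTexture();   // framebuffer / renderbuffer attachments
        isInitialized = 1;
    }
    // yuv_planes[0..2] have already been filled from the decoded frame at this point
    drawFrame();
}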
Here is my current output.