Android bytedeco javacpp-presets ffmpeg: decoding h264 bytes to YUV and rendering with OpenGL ES 2.0



Hi there! I'm trying to display a video stream that arrives from a server as a byte array. The data in this array is h264-encoded video, which I decode with the bytedeco javacpp-presets library like this:

public class DMDecoder {
private static final String LOG_TAG = "DMDecoder";
private AVCodec avCodec;
private AVCodecContext avCodecContext;
private AVFrame avFrame;
private AVPacket avPacket;
private boolean wasIFrame;
private long IFrameTimeStampMs;
private int maxFps;
private int codecId;
private int frames;   // frame counter, reset in stop()
private DMDecoderCallback callback;
public DMDecoder(DMDecoderCallback cb) {
    this.callback = cb;
    this.codecId = AV_CODEC_ID_H264;
    avcodec_register_all();
    restart();
}
public void restart() {
    stop();
    start();
}
public void stop() {
    frames = 0;
    if (avCodecContext != null) {
        avcodec_close(avCodecContext);
        avcodec_free_context(avCodecContext);
        avCodecContext = null;
    }
    if (avCodec != null) {
        // avcodec_find_decoder() returns a static codec descriptor owned by FFmpeg,
        // so it must not be released with av_free()
        avCodec = null;
    }
    if (avFrame != null) {
        av_frame_free(avFrame);
        avFrame = null;
    }
    if (avPacket != null) {
        av_free_packet(avPacket);
        avPacket = null;
    }
}
public void start() {
    avCodec = avcodec_find_decoder(codecId);
    avCodecContext = avcodec_alloc_context3(avCodec);
    AVDictionary opts = new AVDictionary();
    avcodec_open2(avCodecContext, avCodec, opts);
    avFrame = av_frame_alloc();
    avPacket = new AVPacket();
    av_init_packet(avPacket);
}
public VideoFrame decode(byte[] data, int dataOffset, int dataSize) {
    avPacket.pts(AV_NOPTS_VALUE);
    avPacket.dts(AV_NOPTS_VALUE);
    avPacket.data(new BytePointer(data).position(dataOffset));
    avPacket.size(dataSize);
    avPacket.pos(-1);
    IntBuffer gotPicture = IntBuffer.allocate(1);
    int processedBytes = avcodec_decode_video2(
            avCodecContext, avFrame, gotPicture, avPacket);
    // no complete picture yet (e.g. SPS/PPS only, or a decode error)
    if (processedBytes < 0 || gotPicture.get(0) == 0) return null;
    if (avFrame.width() == 0 || avFrame.height() == 0) return null;
    VideoFrame frame = new VideoFrame();
    // note: this copy assumes the decoded planes are tightly packed,
    // i.e. avFrame.linesize(i) == plane width; padded frames would need a per-row copy
    frame.colorPlane0 = new byte[avFrame.width() * avFrame.height()];
    frame.colorPlane1 = new byte[avFrame.width() / 2 * avFrame.height() / 2];
    frame.colorPlane2 = new byte[avFrame.width() / 2 * avFrame.height() / 2];
    if (avFrame.data(0) != null) avFrame.data(0).get(frame.colorPlane0);
    if (avFrame.data(1) != null) avFrame.data(1).get(frame.colorPlane1);
    if (avFrame.data(2) != null) avFrame.data(2).get(frame.colorPlane2);
    frame.lineSize0 = avFrame.width();
    frame.lineSize1 = avFrame.width() / 2;
    frame.lineSize2 = avFrame.width() / 2;
    frame.width = avFrame.width();
    frame.height = avFrame.height();
    return frame;
  }
}
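
For completeness, here is a rough sketch of how the incoming bytes could be fed into DMDecoder from a network thread. This is only an illustration: the StreamReader class, the length-prefixed framing, and the InputStream source are assumptions, not part of the original code.

import java.io.DataInputStream;
import java.io.IOException;
import java.io.InputStream;
import android.util.Log;

// Hypothetical reader loop: each message from the server is assumed to be one
// Annex-B H.264 access unit (start codes included), which is what
// avcodec_decode_video2() expects in avPacket.data.
public class StreamReader implements Runnable {
    private final DMDecoder decoder;
    private final GLRenderer renderer;
    private final InputStream input;          // socket stream from the server
    private volatile boolean running = true;

    public StreamReader(DMDecoder decoder, GLRenderer renderer, InputStream input) {
        this.decoder = decoder;
        this.renderer = renderer;
        this.input = input;
    }

    @Override
    public void run() {
        DataInputStream in = new DataInputStream(input);
        try {
            while (running) {
                int size = in.readInt();              // assumed length-prefixed framing
                byte[] unit = new byte[size];
                in.readFully(unit);
                VideoFrame frame = decoder.decode(unit, 0, size);
                if (frame != null) {
                    renderer.addFrame(frame);         // hand off to the GL thread
                }
            }
        } catch (IOException e) {
            Log.e("StreamReader", "stream ended", e);
        }
    }

    public void stop() {
        running = false;
    }
}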

The VideoFrame class is just a simple POJO:

public class VideoFrame {
    public byte[] colorPlane0;
    public byte[] colorPlane1;
    public byte[] colorPlane2;
    public int lineSize0;
    public int lineSize1;
    public int lineSize2;
    public int width;
    public int height;
    public long presentationTime;
}
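
One detail worth noting: decode() never fills presentationTime, so it stays 0 and the renderer's tick() (shown further down) releases every queued frame immediately. If pacing is wanted, the timestamp could be stamped when the frame is enqueued; below is a minimal sketch, assuming frames should be spaced at a fixed fps (the FramePacer class is hypothetical, not part of the original code).

// Hypothetical helper: stamp presentationTime as frames are queued so that
// GLRenderer.tick() releases them at a steady rate.
public class FramePacer {
    private final long frameIntervalMs;
    private long nextPresentationMs;

    public FramePacer(int fps) {
        this.frameIntervalMs = 1000L / fps;   // assumes fps > 0
    }

    public void stamp(VideoFrame frame) {
        long now = System.currentTimeMillis();
        if (nextPresentationMs < now) {
            nextPresentationMs = now;          // never schedule into the past
        }
        frame.presentationTime = nextPresentationMs;
        nextPresentationMs += frameIntervalMs;
    }
}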

After decoding, I pass the frame to my GLRenderer class:

public class GLRenderer implements GLSurfaceView.Renderer {
    private static final String LOG_TAG = "GLRenderer";
    private TexturePlane plane;
    private ConcurrentLinkedQueue<VideoFrame> frames;
    private int maxFps = 30;
    private VideoFrame currentFrame;
    private long startTime, endTime;
    private int viewWidth, viewHeight;
    private boolean isFirstFrameProcessed;
    public GLRenderer(int viewWidth, int viewHeight) {
        frames = new ConcurrentLinkedQueue<>();
        this.viewWidth = viewWidth;
        this.viewHeight = viewHeight;
    }
    // mMVPMatrix is an abbreviation for "Model View Projection Matrix"
    private final float[] mMVPMatrix = new float[16];
    private final float[] mProjectionMatrix = new float[16];
    private final float[] mViewMatrix = new float[16];
    @Override
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        // Set the background frame color
        GLES20.glClearColor(0.1f, 0.1f, 0.1f, 1.0f);
        plane = new TexturePlane();
    }
    public void setMaxFps(int maxFps) {
        this.maxFps = maxFps;
    }
    @Override
    public void onDrawFrame(GL10 unused) {

        // Draw background color
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        // Set the camera position (View matrix)
        Matrix.setLookAtM(mViewMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f);
        // Calculate the projection and view transformation
        Matrix.multiplyMM(mMVPMatrix, 0, mProjectionMatrix, 0, mViewMatrix, 0);
        if (!isFirstFrameProcessed) checkViewPort(viewWidth, viewHeight);
        if (maxFps > 0 && startTime > 0) {
            endTime = System.currentTimeMillis();
            long time = endTime - startTime;
            //
            long wantedTime = 1000 / maxFps;
            //
            long wait;
            if (time < wantedTime) {
                wait = wantedTime - time;
                //
                try {
                    Thread.sleep(wait);
                } catch (InterruptedException e) {
                    Log.e(LOG_TAG, "thread interrupted exception");
                }
            }
        }
        startTime = System.currentTimeMillis();
        tick();
        plane.draw(mMVPMatrix);
    }
    private void updateFrame(VideoFrame frame) {
        plane.updateTexture(frame.colorPlane0, frame.width, frame.height, 0);
        plane.updateTexture(frame.colorPlane1, frame.width / 2, frame.height / 2, 1);
        plane.updateTexture(frame.colorPlane2, frame.width / 2, frame.height / 2, 2);
        plane.setTextureWidth(frame.width);
        plane.setTextureHeight(frame.height);
    }
    private void tick() {
        if (frames.isEmpty()) return;
        VideoFrame frame = frames.peek();
        if (frame == null) return;
        long tms = System.currentTimeMillis();
        if (frame.presentationTime <= tms) {
            updateFrame(frame);
            currentFrame = frame;
            frames.remove(frame);
        }
    }
    @Override
    public void onSurfaceChanged(GL10 unused, int width, int height) {
        checkViewPort(width, height);
        viewWidth = width;
        viewHeight = height;
        plane.setTextureWidth(width);
        plane.setTextureHeight(height);
    }
    private void checkViewPort(int width, int height) {
        float viewRatio = (float) width / height;
        if (currentFrame != null) {
            float targetRatio = (float) currentFrame.width / currentFrame.height;
            int x, y, newWidth, newHeight;
            if (targetRatio > viewRatio) {
                newWidth = width;
                newHeight = (int) (width / targetRatio);
                x = 0;
                y = (height - newHeight) / 2;
            } else {
                newHeight = height;
                newWidth = (int) (height * targetRatio);
                y = 0;
                x = (width - newWidth) / 2;
            }
            GLES20.glViewport(x, y, newWidth, newHeight);
        } else {
            GLES20.glViewport(0, 0, width, height);
        }
        Matrix.frustumM(mProjectionMatrix, 0, 1, -1, -1, 1, 3, 4);
    }
    public void addFrame(VideoFrame frame) {
        if (frame != null) {
            frames.add(frame);
        }
    }
}
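
The renderer is assumed to be attached to a GLSurfaceView in the usual way; the original doesn't show this wiring, so the following is only a sketch (PlayerActivity and the initial 640x480 size are placeholders).

import android.app.Activity;
import android.opengl.GLSurfaceView;
import android.os.Bundle;

// Minimal sketch of attaching GLRenderer to a GLSurfaceView with an ES 2.0 context.
public class PlayerActivity extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        GLSurfaceView surfaceView = new GLSurfaceView(this);
        surfaceView.setEGLContextClientVersion(2);   // request an OpenGL ES 2.0 context
        // the real view size is not known yet in onCreate();
        // GLRenderer receives it later via onSurfaceChanged()
        GLRenderer renderer = new GLRenderer(640, 480);
        surfaceView.setRenderer(renderer);
        surfaceView.setRenderMode(GLSurfaceView.RENDERMODE_CONTINUOUSLY); // onDrawFrame applies its own fps cap
        setContentView(surfaceView);
    }
}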

GLRenderer uses a simple OpenGL quad onto which I draw all the textures:

public class TexturePlane {
    private static final String LOG_TAG = "TexturePlane";
    private final String vertexShaderCode = "" +
    "uniform mat4 uMVPMatrix;" +
    "attribute vec4 vPosition;" +
    "attribute vec2 a_TexCoordinate;" +
    "varying vec2 v_TexCoordinate;" +
    "void main() {" +
    "  gl_Position = uMVPMatrix * vPosition;" +
    "  v_TexCoordinate = a_TexCoordinate;" +
    "}";
    private final String fragmentShaderCode = "" +
    "precision mediump float;" +
    "varying vec2 v_TexCoordinate;" +
    "uniform sampler2D s_texture_y;" +
    "uniform sampler2D s_texture_u;" +
    "uniform sampler2D s_texture_v;" +
    "void main() {" +
    "   float y = texture2D(s_texture_y, v_TexCoordinate).r;" +
    "   float u = texture2D(s_texture_u, v_TexCoordinate).r - 0.5;" +
    "   float v = texture2D(s_texture_v, v_TexCoordinate).r - 0.5;" +
    "   float r = y + 1.13983 * v;" +
    "   float g = y - 0.39465 * u - 0.58060 * v;" +
    "   float b = y + 2.03211 * u;" +
    "   gl_FragColor = vec4(r, g, b, 1.0);" +
    "}";
    private final FloatBuffer vertexBuffer;
    private final FloatBuffer textureBuffer;
    private final ShortBuffer drawListBuffer;
    private final int mProgram;
    private int mPositionHandle;
    private int mMVPMatrixHandle;
        // number of coordinates per vertex in this array
    private static final int COORDS_PER_VERTEX = 3;
    private static final int COORDS_PER_TEXTURE = 2;
    private static float squareCoords[] = {
        -1f, 1f, 0.0f,
        -1f, -1f, 0.0f,
        1f, -1f, 0.0f,
        1f, 1f, 0.0f
    };
    private static float uvs[] = {
        0.0f, 0.0f,
        0.0f, 1.0f,
        1.0f, 1.0f,
        1.0f, 0.0f
    };
    private final short drawOrder[] = {0, 1, 2, 0, 2, 3}; // order to draw vertices
    private final int vertexStride = COORDS_PER_VERTEX * 4; // 4 bytes per vertex
    private int textureWidth = 640;
    private int textureHeight = 480;
    private int yTextureUniformHandle;
    private int uTextureUniformHandle;
    private int vTextureUniformHandle;
    private int yTextureHandle;
    private int uTextureHandle;
    private int vTextureHandle;
    private int mTextureCoordinateHandle;
    public void setTextureWidth(int textureWidth) {
        this.textureWidth = textureWidth;
    }
    public int getTextureWidth() {
        return textureWidth;
    }
    public void setTextureHeight(int textureHeight) {
        this.textureHeight = textureHeight;
    }
    public int getTextureHeight() {
        return textureHeight;
    }
    /**
     * Sets up the drawing object data for use in an OpenGL ES context.
     */
    public TexturePlane() {
            // initialize vertex byte buffer for shape coordinates
        ByteBuffer bb = ByteBuffer.allocateDirect(squareCoords.length * 4);
        bb.order(ByteOrder.nativeOrder());
        vertexBuffer = bb.asFloatBuffer();
        vertexBuffer.put(squareCoords);
        vertexBuffer.position(0);
            // initialize byte buffer for the draw list
        ByteBuffer dlb = ByteBuffer.allocateDirect(drawOrder.length * 2);
        dlb.order(ByteOrder.nativeOrder());
        drawListBuffer = dlb.asShortBuffer();
        drawListBuffer.put(drawOrder);
        drawListBuffer.position(0);
            // initialize byte buffer for the texture coordinates
        ByteBuffer tbb = ByteBuffer.allocateDirect(uvs.length * 4);
        tbb.order(ByteOrder.nativeOrder());
        textureBuffer = tbb.asFloatBuffer();
        textureBuffer.put(uvs);
        textureBuffer.position(0);
            mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
            compileShaders();
            setupTextures();
        }
        public void setupTextures() {
            yTextureHandle = setupTexture(null, textureWidth, textureHeight, 0);
            uTextureHandle = setupTexture(null, textureWidth, textureHeight, 1);
            vTextureHandle = setupTexture(null, textureWidth, textureHeight, 2);
        }
        public int setupTexture(byte[] data, int width, int height, int index) {
            final int[] textureHandle = new int[1];
            GLES20.glGenTextures(1, textureHandle, 0);
            if (textureHandle[0] != 0) {
                    // Bind to the texture in OpenGL
                GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);
                GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[0]);
                updateTexture(data, width, height, index);
                    // Set filtering
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
                    // Set wrapping mode
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT);
                GLES20.glTexParameterf(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT);
            }
            if (textureHandle[0] == 0) {
                Log.e(LOG_TAG, "Error loading texture.");
            }
            return textureHandle[0];
        }
        public void updateTexture(byte[] data, int width, int height, int index) {
            if (data == null) {
                if (width == 0 || height == 0) {
                    width = textureWidth;
                    height = textureHeight;
                }
                data = new byte[width * height];
                // assumption: fill the empty texture with black (Y = 0, U = V = 128)
                if (index == 0) {
                    Arrays.fill(data, (byte) 0);
                } else {
                    Arrays.fill(data, (byte) 128);
                }
            }
            // ByteBuffer.wrap() is static; it returns a new buffer backed by the array
            ByteBuffer byteBuffer = ByteBuffer.wrap(data);
            byteBuffer.position(0);
            GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);
            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
                width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, byteBuffer);
        }
        private void compileShaders() {
            // prepare shaders and OpenGL program
            int vertexShader = loadShader(
                GLES20.GL_VERTEX_SHADER,
                vertexShaderCode);
            int fragmentShader = loadShader(
                GLES20.GL_FRAGMENT_SHADER,
                fragmentShaderCode);
            GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
            GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
            GLES20.glLinkProgram(mProgram);                  // create OpenGL program executables
            checkGlError("glLinkProgram");
            // Add program to OpenGL environment
            GLES20.glUseProgram(mProgram);
            mPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
            mTextureCoordinateHandle = GLES20.glGetAttribLocation(mProgram, "a_TexCoordinate");
            GLES20.glEnableVertexAttribArray(mPositionHandle);
            GLES20.glEnableVertexAttribArray(mTextureCoordinateHandle);
            yTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "s_texture_y");
            // uniform names must match the fragment shader exactly
            uTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "s_texture_u");
            vTextureUniformHandle = GLES20.glGetUniformLocation(mProgram, "s_texture_v");
            mMVPMatrixHandle = GLES20.glGetUniformLocation(mProgram, "uMVPMatrix");
            checkGlError("glGetUniformLocation");
        }
    /**
     * Utility method for compiling a OpenGL shader.
     * <p/>
     * <p><strong>Note:</strong> When developing shaders, use the checkGlError()
     * method to debug shader coding errors.</p>
     *
     * @param type       - Vertex or fragment shader type.
     * @param shaderCode - String containing the shader code.
     * @return - Returns an id for the shader.
     */
    public int loadShader(int type, String shaderCode) {
            // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
            // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type);
            // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }
    /**
     * Utility method for debugging OpenGL calls. Provide the name of the call
     * just after making it:
     * <p/>
     * <pre>
     * mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor");
     * MyGLRenderer.checkGlError("glGetUniformLocation");</pre>
     *
     * If the operation is not successful, the check throws an error.
     *
     * @param glOperation - Name of the OpenGL call to check.
     */
    public void checkGlError(String glOperation) {
        int error;
        String errorString;
        while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) {
            errorString = GLU.gluErrorString(error);
            String message = glOperation + ": glError " + error + ": " + errorString;
            Log.e(LOG_TAG, message);
            throw new RuntimeException(message);
        }
    }
    public void draw(float[] mvpMatrix) {
            // Prepare the triangle coordinate data
        GLES20.glVertexAttribPointer(
            mPositionHandle, COORDS_PER_VERTEX,
            GLES20.GL_FLOAT, false,
            vertexStride, vertexBuffer);
        GLES20.glVertexAttribPointer(
            mTextureCoordinateHandle, COORDS_PER_TEXTURE,
            GLES20.GL_FLOAT, false,
            0, textureBuffer);
        GLES20.glUniformMatrix4fv(mMVPMatrixHandle, 1, false, mvpMatrix, 0);
        checkGlError("glUniformMatrix4fv");
        GLES20.glUniform1i(yTextureUniformHandle, 0);
        GLES20.glUniform1i(uTextureUniformHandle, 1);
        GLES20.glUniform1i(vTextureUniformHandle, 2);
            // Draw the square
        GLES20.glDrawElements(
            GLES20.GL_TRIANGLES, drawOrder.length,
            GLES20.GL_UNSIGNED_SHORT, drawListBuffer);
    }
} 
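
The fragment shader above converts YUV to RGB with BT.601-style coefficients. For sanity-checking individual pixel values on the CPU, the same math can be written out in Java; yuvToArgb below is a hypothetical helper, not part of the original code:

// Hypothetical CPU-side reference of the shader's YUV -> RGB conversion
// (same coefficients as fragmentShaderCode), handy for spot-checking a pixel.
public static int yuvToArgb(int y8, int u8, int v8) {
    float y = y8 / 255f;
    float u = u8 / 255f - 0.5f;
    float v = v8 / 255f - 0.5f;
    float r = y + 1.13983f * v;
    float g = y - 0.39465f * u - 0.58060f * v;
    float b = y + 2.03211f * u;
    int ri = Math.max(0, Math.min(255, Math.round(r * 255f)));
    int gi = Math.max(0, Math.min(255, Math.round(g * 255f)));
    int bi = Math.max(0, Math.min(255, Math.round(b * 255f)));
    return 0xFF000000 | (ri << 16) | (gi << 8) | bi;
}

For example, y8 = 255 with u8 = v8 = 128 comes out white, which is a quick way to confirm the coefficients.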

But I have a problem: my GL surface displays the image with the wrong colors.

What am I doing wrong?

UPDATE:

As Ronald S. Bultje suggested, I added the glBindTexture(...) call to my code. The updateTexture(...) method now looks like this:

public void updateTexture(byte[] data, int width, int height, int index) {
    if (data == null) {
        if (width == 0 || height == 0) {
            width = textureWidth;
            height = textureHeight;
        }
        data = new byte[width * height];
        // assumption: fill the empty texture with black (Y = 0, U = V = 128)
        if (index == 0) {
            Arrays.fill(data, (byte) 0);
        } else {
            Arrays.fill(data, (byte) 128);
        }
    }
    // ByteBuffer.wrap() is static; it returns a new buffer backed by the array
    ByteBuffer byteBuffer = ByteBuffer.wrap(data);
    byteBuffer.position(0);
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);
    int textureHandle = index == 0 ? yTextureHandle : index == 1 ? uTextureHandle : vTextureHandle;
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
    GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE,
        width, height, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, byteBuffer);
}

For reference, the answer from Ronald S. Bultje: the updateTexture() function doesn't call GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle[index]); after calling GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);

[edit] Actually, given your code, it would be index == 0 ? yTextureHandle : index == 1 ? uTextureHandle : vTextureHandle, but I'm sure you can figure out how to restructure your code so this becomes easier.
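
The refactor hinted at in that [edit] could look roughly like this; it is only a sketch (bindPlaneTexture and the textureHandles array are not in the original code):

// Sketch of the suggested refactor: keep the three plane handles in an array
// so updateTexture() can bind the right one without a nested ternary.
private int[] textureHandles = new int[3];   // filled in setupTextures(): Y, U, V

private void bindPlaneTexture(int index) {
    GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);
    GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandles[index]);
}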
