onFrameAvailable callback never arrives



I am new to Android application development and am trying to capture camera frames using a SurfaceTexture. The callback to onFrameAvailable() is never invoked... Please suggest a solution. The code is below.

What is missing here? I am not sure whether I am making the correct call to setOnFrameAvailableListener().

package com.example.cameratest;
import com.example.test.R;
import android.app.Activity;
import android.content.Intent;
import android.os.Bundle;
import android.view.Menu;
import android.view.View;

import android.graphics.SurfaceTexture;
import android.graphics.SurfaceTexture.OnFrameAvailableListener;
import android.hardware.Camera;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import android.media.MediaMuxer;
import android.opengl.*;
import android.util.Log;
import android.view.Surface;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.concurrent.locks.ReentrantLock;
public class MainActivity extends Activity implements OnFrameAvailableListener {
    private static final String TAG = "CameraToMpegTest";
    private static final boolean VERBOSE = true;           // lots of logging
    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission)
    private static final long DURATION_SEC = 8;
    // camera state
    private Camera mCamera;
    private static SurfaceTexture mSurfaceTexture;
    private int[] mGlTextures = null;
    private Object mFrameSyncObject = new Object();
    private boolean mFrameAvailable = false;
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
    }
    public void startCamera(View v) {
        try {
            this.initCamera(0);
            this.StartCamera();

        } catch (Throwable throwable) {
            throwable.printStackTrace();
        }
    }
    private void StartCamera() {
        try {
            mCamera.startPreview();
            long startWhen = System.nanoTime();
            long desiredEnd = startWhen + DURATION_SEC * 1000000000L;
            int frameCount = 0;
            while (System.nanoTime() < desiredEnd) {
                // Feed any pending encoder output into the muxer.
                awaitNewImage();
            }
        } finally {
            // release everything we grabbed
            releaseCamera();
        }
    }
    /**
     * Stops camera preview, and releases the camera to the system.
     */
    private void releaseCamera() {
        if (VERBOSE) Log.d(TAG, "releasing camera");
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
    }
    private void initCamera(int cameraId) {
        mCamera = Camera.open(cameraId);
        if (mCamera == null) {
            Log.d(TAG, "No front-facing camera found; opening default");
            mCamera = Camera.open();    // opens first back-facing camera
        }
        if (mCamera == null) {
            throw new RuntimeException("Unable to open camera");
        }
        Camera.Parameters parms = mCamera.getParameters();
        parms.setPreviewSize(640, 480);
        mGlTextures = new int[1];
        GLES20.glGenTextures(1, mGlTextures, 0);

        GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mGlTextures[0]);

        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER,
                GLES20.GL_NEAREST);
        GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER,
                GLES20.GL_LINEAR);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S,
                GLES20.GL_CLAMP_TO_EDGE);
        GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T,
                GLES20.GL_CLAMP_TO_EDGE);
        mSurfaceTexture = new SurfaceTexture(mGlTextures[0]);
        try {
            mCamera.setPreviewTexture(mSurfaceTexture);
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this);

    }
    public void awaitNewImage() {
        final int TIMEOUT_MS = 4500;
        synchronized (mFrameSyncObject) {
            while (!mFrameAvailable) {
                try {
                    // Wait for onFrameAvailable() to signal us.  Use a timeout to avoid
                    // stalling the test if it doesn't arrive.
                    if (VERBOSE) Log.i(TAG, "Waiting for Frame in Thread");
                    mFrameSyncObject.wait(TIMEOUT_MS);
                    if (!mFrameAvailable) {
                        // TODO: if "spurious wakeup", continue while loop
                        throw new RuntimeException("Camera frame wait timed out");
                    }
                } catch (InterruptedException ie) {
                    // shouldn't happen
                    throw new RuntimeException(ie);
                }
            }
            mFrameAvailable = false;
        }

    }
    @Override
    public void onFrameAvailable(SurfaceTexture st) {
        if (VERBOSE) Log.d(TAG, "new frame available");
        synchronized (mFrameSyncObject) {
            if (mFrameAvailable) {
                throw new RuntimeException("mFrameAvailable already set, frame could be dropped");
            }
            mFrameAvailable = true;
            mFrameSyncObject.notifyAll();
        }
    }
}

I think you have to call SurfaceTexture.updateTexImage() after the onFrameAvailable() callback, to tell the camera "I've used your last frame, give me another one".

(Sorry that my English can't give a better explanation.)

    @Override   
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        ...
        surfaceTexture.updateTexImage();
    }

I ran into the same problem; it turned out I had forgotten to call updateTexImage().

Use the method setOnFrameAvailableListener(@Nullable final OnFrameAvailableListener listener, @Nullable Handler handler) instead of setOnFrameAvailableListener(@Nullable OnFrameAvailableListener listener).

In your case, you can modify the code to:

HandlerThread frameUpdateThread = new HandlerThread("frameUpdateThread");
frameUpdateThread.start();
mSurfaceTexture.setOnFrameAvailableListener(MainActivity.this,
        new Handler(frameUpdateThread.getLooper()));

In my understanding, onFrameAvailable should be used together with a separate thread. Done this way, I no longer hit this problem, and it also ensures that updateTexImage() is called after a frame has been received.
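
Putting the two suggestions together, here is a minimal sketch of the pattern (the class and method names are illustrative, not from the question; it also assumes the EGL context that owns the external texture has been made current on the handler thread, since updateTexImage() must be called on the thread that holds that GL context):

import android.graphics.SurfaceTexture;
import android.os.Handler;
import android.os.HandlerThread;

// Hypothetical helper: registers the listener on a dedicated HandlerThread
// and consumes each frame with updateTexImage() when it arrives.
public class CameraFrameReceiver implements SurfaceTexture.OnFrameAvailableListener {
    private final HandlerThread frameUpdateThread;

    public CameraFrameReceiver(SurfaceTexture surfaceTexture) {
        // Deliver onFrameAvailable() on this thread's Looper instead of the main
        // thread, which in the question is blocked inside the awaitNewImage() loop.
        frameUpdateThread = new HandlerThread("frameUpdateThread");
        frameUpdateThread.start();
        surfaceTexture.setOnFrameAvailableListener(this,
                new Handler(frameUpdateThread.getLooper()));
    }

    @Override
    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
        // Latch the new frame so the camera will produce the next one.
        // updateTexImage() must run on the thread whose EGL context owns the
        // external texture, so that context is assumed to be current here.
        surfaceTexture.updateTexImage();
    }

    public void release() {
        frameUpdateThread.quitSafely();
    }
}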
