Recording video with MediaCodec using the Camera2 API



I am trying to use MediaCodec to record the raw frames delivered to the ImageReader's onImageAvailable callback, but I can't come up with working code. Most of the examples use the Camera 1 API or MediaRecorder. My goal is to capture individual frames, process them, and create an MP4 out of them.

Raw YUV frames

private final ImageReader.OnImageAvailableListener mOnImageAvailableListener =
        new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image i = reader.acquireLatestImage();
        processImage(i);
        i.close();
        Log.d("hehe", "onImageAvailable");
    }
};

MediaCodec

MediaCodec codec = MediaCodec.createByCodecName(name);
MediaFormat mOutputFormat; // member variable
codec.setCallback(new MediaCodec.Callback() {
    @Override
    void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
        ByteBuffer inputBuffer = codec.getInputBuffer(inputBufferId);
        // fill inputBuffer with valid data
        …
        codec.queueInputBuffer(inputBufferId, …);
    }

    @Override
    void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …) {
        ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufferId);
        MediaFormat bufferFormat = codec.getOutputFormat(outputBufferId); // option A
        // bufferFormat is equivalent to mOutputFormat
        // outputBuffer is ready to be processed or rendered.
        …
        codec.releaseOutputBuffer(outputBufferId, …);
    }

    @Override
    void onOutputFormatChanged(MediaCodec mc, MediaFormat format) {
        // Subsequent data will conform to new format.
        // Can ignore if using getOutputFormat(outputBufferId)
        mOutputFormat = format; // option B
    }

    @Override
    void onError(…) {
        …
    }
});
codec.configure(format, …);
mOutputFormat = codec.getOutputFormat(); // option B
codec.start();
// wait for processing to complete
codec.stop();
codec.release();

I cannot relate this to the sample code given at https://developer.android.com/reference/android/media/MediaCodec. Please help.

You have to create a queue, push the buffers built from the image planes into it, and consume them in void onInputBufferAvailable(MediaCodec mc, int inputBufferId).

1) Create a class to wrap the buffer data:

class MyData {
    byte[] buffer;
    long presentationTimeUs;
    // Marks the end of stream (EOS); otherwise the encoder cannot know when to stop.
    boolean isEOS;

    public MyData(byte[] buffer, long presentationTimeUs, boolean isEOS) {
        this.buffer = new byte[buffer.length];
        System.arraycopy(buffer, 0, this.buffer, 0, buffer.length);
        this.presentationTimeUs = presentationTimeUs;
        this.isEOS = isEOS;
    }

    public byte[] getBuffer() {
        return buffer;
    }

    public void setBuffer(byte[] buffer) {
        this.buffer = buffer;
    }

    public long getPresentationTimeUs() {
        return presentationTimeUs;
    }

    public void setPresentationTimeUs(long presentationTimeUs) {
        this.presentationTimeUs = presentationTimeUs;
    }

    public boolean isEOS() {
        return isEOS;
    }

    public void setEOS(boolean EOS) {
        isEOS = EOS;
    }
}

2) Create the queue:

Queue<MyData> mQueue = new LinkedList<MyData>();
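
Note that onImageAvailable() and the MediaCodec callbacks usually run on different threads, so a thread-safe queue is safer than a plain LinkedList. A minimal sketch of that alternative (same role as mQueue above):

import java.util.Queue;
import java.util.concurrent.ConcurrentLinkedQueue;

// Safe to access from both the ImageReader thread and the MediaCodec
// callback thread without extra locking.
Queue<MyData> mQueue = new ConcurrentLinkedQueue<>();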

3) Convert the image planes to a byte array (byte[]) using native code (a sketch of the matching Java-side declaration follows after this step):

  • Add native support in your Gradle file:

    android {
        compileSdkVersion 27
        defaultConfig {
            ...
            externalNativeBuild {
                cmake {
                    arguments "-DANDROID_STL=stlport_static"
                    cppFlags "-std=c++11"
                }
            }
        }
        externalNativeBuild {
            cmake {
                path "CMakeLists.txt"
            }
        }
        ...
    }
    
  • Create a function that converts the image planes to a byte array:

(native-yuv-to-buffer.cpp)

extern "C" JNIEXPORT 
jbyteArray JNICALL
Java_labs_farzi_camera2previewstream_MainActivity_yuvToBuffer
(
JNIEnv *env,
jobject instance,
jobject yPlane,
jobject uPlane,
jobject vPlane,
jint yPixelStride,
jint yRowStride,
jint uPixelStride,
jint uRowStride,
jint vPixelStride,
jint vRowStride,
jint imgWidth,
jint imgHeight) {
bbuf_yIn = static_cast<uint8_t *>(env->GetDirectBufferAddress(yPlane));
bbuf_uIn = static_cast<uint8_t *>(env->GetDirectBufferAddress(uPlane));
bbuf_vIn = static_cast<uint8_t *>(env->GetDirectBufferAddress(vPlane));
buf = (uint8_t *) malloc(sizeof(uint8_t) * imgWidth * imgHeight +
2 * (imgWidth + 1) / 2 * (imgHeight + 1) / 2);
    bool isNV21;
    if (yPixelStride == 1) {
        // All pixels in a row are contiguous; copy one line at a time.
        for (int y = 0; y < imgHeight; y++)
            memcpy(buf + y * imgWidth, bbuf_yIn + y * yRowStride,
                   static_cast<size_t>(imgWidth));
    } else {
        // Highly improbable, but not disallowed by the API. In this case
        // individual pixels aren't stored consecutively but sparsely, with
        // other data in between each pixel.
        for (int y = 0; y < imgHeight; y++)
            for (int x = 0; x < imgWidth; x++)
                buf[y * imgWidth + x] = bbuf_yIn[y * yRowStride + x * yPixelStride];
    }
    uint8_t *chromaBuf = &buf[imgWidth * imgHeight];
    int chromaBufStride = 2 * ((imgWidth + 1) / 2);
    if (uPixelStride == 2 && vPixelStride == 2 &&
        uRowStride == vRowStride && bbuf_uIn == bbuf_vIn + 1) {
        isNV21 = true;
        // The cb/cr planes happen to be laid out in exact NV21 form
        // in memory; copy them as is.
        for (int y = 0; y < (imgHeight + 1) / 2; y++)
            memcpy(chromaBuf + y * chromaBufStride, bbuf_vIn + y * vRowStride,
                   static_cast<size_t>(chromaBufStride));
    } else if (vPixelStride == 2 && uPixelStride == 2 &&
               uRowStride == vRowStride && bbuf_vIn == bbuf_uIn + 1) {
        isNV21 = false;
        // The cb/cr planes happen to be laid out in exact NV12 form in memory;
        // copy from the cb pointer instead of the cr pointer. If the destination
        // API cannot use NV12, remove this clause and use the generic code below.
        for (int y = 0; y < (imgHeight + 1) / 2; y++)
            memcpy(chromaBuf + y * chromaBufStride, bbuf_uIn + y * uRowStride,
                   static_cast<size_t>(chromaBufStride));
    } else {
        isNV21 = true;
        if (vPixelStride == 1 && uPixelStride == 1) {
            // Contiguous cb/cr planes; the input data was I420/YV12 or similar;
            // copy it into NV21 form.
            for (int y = 0; y < (imgHeight + 1) / 2; y++) {
                for (int x = 0; x < (imgWidth + 1) / 2; x++) {
                    chromaBuf[y * chromaBufStride + 2 * x + 0] = bbuf_vIn[y * vRowStride + x];
                    chromaBuf[y * chromaBufStride + 2 * x + 1] = bbuf_uIn[y * uRowStride + x];
                }
            }
        } else {
            // Generic copy into NV21.
            for (int y = 0; y < (imgHeight + 1) / 2; y++) {
                for (int x = 0; x < (imgWidth + 1) / 2; x++) {
                    chromaBuf[y * chromaBufStride + 2 * x + 0] =
                            bbuf_vIn[y * vRowStride + x * vPixelStride];
                    chromaBuf[y * chromaBufStride + 2 * x + 1] =
                            bbuf_uIn[y * uRowStride + x * uPixelStride];
                }
            }
        }
    }
    // Convert the semi-planar working buffer to planar I420 for the encoder.
    uint8_t *I420Buff = (uint8_t *) malloc(sizeof(uint8_t) * imgWidth * imgHeight +
                                           2 * (imgWidth + 1) / 2 * (imgHeight + 1) / 2);
    SPtoI420(buf, I420Buff, imgWidth, imgHeight, isNV21);
    jbyteArray ret = env->NewByteArray(imgWidth * imgHeight * 3 / 2);
    env->SetByteArrayRegion(ret, 0, imgWidth * imgHeight * 3 / 2, (jbyte *) I420Buff);
    free(buf);
    free(I420Buff);
    return ret;
}
  • Add a function to convert semi-planar to planar:
bool SPtoI420(const uint8_t *src, uint8_t *dst, int width, int height, bool isNV21) {
    if (!src || !dst) {
        return false;
    }
    unsigned int YSize = width * height;
    unsigned int UVSize = (YSize >> 1);
    // Source (semi-planar): YYYY... followed by interleaved chroma
    // (VUVU... for NV21, UVUV... for NV12).
    const uint8_t *pSrcY = src;
    const uint8_t *pSrcUV = src + YSize;
    // Destination (I420): YYYY... + UU... + VV...
    uint8_t *pDstY = dst;
    uint8_t *pDstU = dst + YSize;
    uint8_t *pDstV = dst + YSize + (UVSize >> 1);
    // Copy Y.
    memcpy(pDstY, pSrcY, YSize);
    // De-interleave U and V.
    for (int k = 0; k < (UVSize >> 1); k++) {
        if (isNV21) {
            pDstV[k] = pSrcUV[k * 2];       // V comes first in NV21
            pDstU[k] = pSrcUV[k * 2 + 1];
        } else {
            pDstU[k] = pSrcUV[k * 2];       // U comes first in NV12
            pDstV[k] = pSrcUV[k * 2 + 1];
        }
    }
    return true;
}
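
For completeness, the native function above still has to be loaded and declared on the Java side. A minimal sketch, assuming the CMake target built from native-yuv-to-buffer.cpp is named native-yuv-to-buffer (adjust to whatever your CMakeLists.txt actually builds) and that the method lives in MainActivity of the labs.farzi.camera2previewstream package, as the JNI symbol above implies:

// Inside MainActivity (labs.farzi.camera2previewstream):
static {
    // Assumption: the shared library name defined in CMakeLists.txt.
    System.loadLibrary("native-yuv-to-buffer");
}

// Matches Java_labs_farzi_camera2previewstream_MainActivity_yuvToBuffer above.
public native byte[] yuvToBuffer(ByteBuffer yPlane, ByteBuffer uPlane, ByteBuffer vPlane,
                                 int yPixelStride, int yRowStride,
                                 int uPixelStride, int uRowStride,
                                 int vPixelStride, int vRowStride,
                                 int imgWidth, int imgHeight);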

4) Push the buffers to the queue:

private final ImageReader.OnImageAvailableListener mOnGetPreviewListener
        = new ImageReader.OnImageAvailableListener() {
    @Override
    public void onImageAvailable(ImageReader reader) {
        Image image = reader.acquireLatestImage();
        if (image == null)
            return;
        final Image.Plane[] planes = image.getPlanes();
        Image.Plane yPlane = planes[0];
        Image.Plane uPlane = planes[1];
        Image.Plane vPlane = planes[2];
        byte[] mBuffer = yuvToBuffer(yPlane.getBuffer(),
                uPlane.getBuffer(),
                vPlane.getBuffer(),
                yPlane.getPixelStride(),
                yPlane.getRowStride(),
                uPlane.getPixelStride(),
                uPlane.getRowStride(),
                vPlane.getPixelStride(),
                vPlane.getRowStride(),
                image.getWidth(),
                image.getHeight());
        // Note: Image.getTimestamp() is in nanoseconds, while MediaCodec expects
        // presentation times in microseconds, so you may want to divide by 1000.
        mQueue.add(new MyData(mBuffer, image.getTimestamp(), false));
        image.close();
        Log.d("hehe", "onImageAvailable");
    }
};
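
The isEOS flag only works if something actually pushes an end-of-stream entry when you stop recording. One way to do that (a sketch; stopRecording() is a hypothetical method of your own):

// Hypothetical stop method in your recording class.
private void stopRecording() {
    // Push an empty sentinel frame; onInputBufferAvailable() below should detect
    // isEOS() and queue the buffer with MediaCodec.BUFFER_FLAG_END_OF_STREAM.
    mQueue.add(new MyData(new byte[0], 0, true));
}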

5) Encode the data and save it as a raw H.264 file (playable in VLC); a sketch of the encoder configuration follows after these callbacks:

@Override
public void onInputBufferAvailable(MediaCodec mc, int inputBufferId) {
    ByteBuffer inputBuffer = mc.getInputBuffer(inputBufferId);
    Log.d(TAG, "onInputBufferAvailable: ");
    // fill inputBuffer with valid data
    MyData data = mQueue.poll();
    if (data != null) {
        // If data.isEOS(), queue the buffer with MediaCodec.BUFFER_FLAG_END_OF_STREAM
        // instead; otherwise queue the frame normally:
        if (inputBuffer != null) {
            Log.e(TAG, "onInputBufferAvailable: " + data.getBuffer().length);
            inputBuffer.clear();
            inputBuffer.put(data.getBuffer());
            mc.queueInputBuffer(inputBufferId,
                    0,
                    data.getBuffer().length,
                    data.getPresentationTimeUs(),
                    0);
        }
    } else {
        // No frame ready yet: return the buffer to the codec empty.
        mc.queueInputBuffer(inputBufferId,
                0,
                0,
                0,
                0);
    }
}
@Override
public void onOutputBufferAvailable(@NonNull MediaCodec codec, int index, @NonNull MediaCodec.BufferInfo info) {
    Log.d(TAG, "onOutputBufferAvailable: ");
    ByteBuffer outputBuffer = codec.getOutputBuffer(index);
    byte[] outData = new byte[info.size];
    if (outputBuffer != null) {
        outputBuffer.get(outData);
        try {
            fos.write(outData);   // fos: a FileOutputStream opened to the .h264 output file
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    codec.releaseOutputBuffer(index, false);
}
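
The snippets above assume the encoder itself has already been created and configured. A minimal configuration sketch (the width/height, bit rate, and frame rate values are assumptions to adapt; COLOR_FormatYUV420Flexible is a common choice on API 21+, but the exact input layout a given codec expects can still differ from plain I420):

// callback is the MediaCodec.Callback containing the two methods shown above.
private MediaCodec createEncoder(int width, int height, MediaCodec.Callback callback)
        throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC,
            width, height);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 4_000_000);   // assumption: 4 Mbit/s
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);        // assumption: 30 fps
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);   // one keyframe per second

    MediaCodec codec = MediaCodec.createEncoderByType(MediaFormat.MIMETYPE_VIDEO_AVC);
    codec.setCallback(callback);   // asynchronous mode, as in the question
    codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    codec.start();
    return codec;
}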

6) Mux your track in void onOutputBufferAvailable(MediaCodec mc, int outputBufferId, …); the handling is similar to the synchronous-mode examples you can find on the Internet, and a short MediaMuxer sketch is given below.
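
A rough MediaMuxer sketch for that step (mMuxer, mTrackIndex, and the output path are assumptions; the muxer has to be started from onOutputFormatChanged(), because that is where the encoder reports the format containing the codec-specific data):

// In onOutputFormatChanged(MediaCodec mc, MediaFormat format):
try {
    mMuxer = new MediaMuxer("/sdcard/out.mp4",   // assumption: output path
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
} catch (IOException e) {
    e.printStackTrace();
    return;
}
mTrackIndex = mMuxer.addTrack(format);
mMuxer.start();

// In onOutputBufferAvailable(), instead of writing to the FileOutputStream:
ByteBuffer outputBuffer = codec.getOutputBuffer(index);
if (outputBuffer != null && info.size > 0
        && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
    mMuxer.writeSampleData(mTrackIndex, outputBuffer, info);
}
codec.releaseOutputBuffer(index, false);

// Once the buffer carrying BUFFER_FLAG_END_OF_STREAM has been written:
mMuxer.stop();
mMuxer.release();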

I hope my answer helps you.

The full sample code is here.

Why not try this sample: https://github.com/googlesamples/android-Camera2Video

I think it will cover all your requirements, and if you cannot relate it to the code in the sample above, feel free to contact me.

The sample uses the Camera2 API, and the conversion from raw YUV frames you are after can be done with it. So if you go through the sample once and reuse its code in your application, you should have no trouble recording MP4 video.

For example: a) In this case you have to implement CameraDevice.StateCallback to receive events about changes of the camera device's state. Override its methods to set the CameraDevice instance, start the preview, and stop and release the camera.

b) When starting the preview, set up the MediaRecorder to accept the video format.

c) Then set up a CaptureRequest.Builder using createCaptureRequest(CameraDevice.TEMPLATE_RECORD) on the CameraDevice instance.

d) Then implement CameraCaptureSession.StateCallback, using the createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {...}) method on the CameraDevice instance, where surfaces is a list consisting of the TextureView's surface and the MediaRecorder instance's surface.

e) Use start() and stop() on the MediaRecorder instance to actually start and stop the recording.

f) Finally, set up and clean up your camera device in onResume() and onPause(). (A condensed sketch of steps b) to d) is given below.)
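
A condensed sketch of steps b) to d) (the members mMediaRecorder, mCameraDevice, mPreviewSurface and the output path are assumptions; the linked sample shows the complete flow including permissions and background handlers):

private void startRecordingSession() throws IOException, CameraAccessException {
    // b) Configure the MediaRecorder before building the session.
    mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.SURFACE);
    mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
    mMediaRecorder.setOutputFile("/sdcard/video.mp4");        // assumption: output path
    mMediaRecorder.setVideoEncodingBitRate(10_000_000);
    mMediaRecorder.setVideoFrameRate(30);
    mMediaRecorder.setVideoSize(1920, 1080);
    mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    mMediaRecorder.prepare();

    // c) Recording request that targets both the preview and the recorder surface.
    final CaptureRequest.Builder builder =
            mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_RECORD);
    builder.addTarget(mPreviewSurface);
    builder.addTarget(mMediaRecorder.getSurface());

    // d) Capture session over the same surfaces.
    List<Surface> surfaces = Arrays.asList(mPreviewSurface, mMediaRecorder.getSurface());
    mCameraDevice.createCaptureSession(surfaces, new CameraCaptureSession.StateCallback() {
        @Override
        public void onConfigured(@NonNull CameraCaptureSession session) {
            try {
                session.setRepeatingRequest(builder.build(), null, null);
                mMediaRecorder.start();                        // e) start recording
            } catch (CameraAccessException e) {
                e.printStackTrace();
            }
        }

        @Override
        public void onConfigureFailed(@NonNull CameraCaptureSession session) {
            Log.e(TAG, "Capture session configuration failed");
        }
    }, null);
}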

Happy coding.
