媒体编解码器在解码H264文件时总是超时



我一直试图用安卓的MediaCodec对通过H264编码编码的视频文件进行解码,并试图将解码器的输出放到表面,但当我运行应用程序时,它显示黑色表面,在DDMS logcat中,我看到解码器超时

我首先将文件解析为有效的[首先读取4个字节,指示即将到来的帧的长度,然后读取长度-数量-字节,指示该帧,然后再次读取下一帧的长度的4个字节等等。]然后将帧循环传给解码器。在配置解码器时,我通过了sps&ppsmediaFormat中,通过直接从编码文件硬编码值[我通过使用hexedit打开该文件获得这些值]。我没有设置任何presentationTimeUS,并使用了0。现在解码器的dequeInputBuffer()方法返回>=0值,但dequeOutputBuffer(。

请看一下我的代码,并提供帮助。

提前谢谢。

这是文件url-https://drive.google.com/file/d/0B39qOyEnXlR8Z3FSb2lzTWlORUU/edit?usp=sharing

这是我的代码-

import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import android.app.Activity;
import android.media.MediaCodec;
import android.media.MediaCodec.BufferInfo;
import android.media.MediaFormat;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.util.Log;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.Toast;
public class MainActivity extends Activity implements SurfaceHolder.Callback 
{
private static final String filePath = Environment.getExternalStorageDirectory()+ "/H264Data1.264"; // + "/video_encoded.263";//"/video_encoded.264";
private PlayerThread mPlayer = null;
Handler handler = null;
public static byte[] SPS = null;
public static byte[] PPS = null;
public static ArrayList<Frame> frames = null;
public static int frameID = 0;
public static boolean incompleteLastFrame = false;
File encodedFile = new File(filePath);
InputStream is;
private static class Frame
{
public int id;
public byte[] frameData;
public Frame(int id)
{
this.id = id;
}
}
@Override
protected void onCreate(Bundle savedInstanceState) 
{
super.onCreate(savedInstanceState);
try 
{
is = new FileInputStream(encodedFile);
byte[] data = new byte[(int)encodedFile.length()];
System.out.println("Total file size : " + encodedFile.length());
frameID = 0;
frames = new ArrayList<Frame>();
try {
if ((is.read(data, 0, (int)encodedFile.length())) != -1) 
{
getFramesFromData(data);
Toast.makeText(getApplicationContext(), "frames processing finished. number of frames : " + frames.size(), Toast.LENGTH_SHORT).show();
SurfaceView sv = new SurfaceView(this);
handler = new Handler();
sv.getHolder().addCallback(this);
setContentView(sv);
}
} catch (IOException e) {
e.printStackTrace();
}
} catch (FileNotFoundException e) {
e.printStackTrace();
}

}
public static void getFramesFromData(byte[] data) 
{
int dataLength = data.length;
int frameLength = 0;
frameID = 0;
if(data.length <= 0) return;
// each iteration in this loop indicates generation of a new frame
for(int i = 0; ; )
{
if(i+3 >= dataLength) return;
frameLength = ((data[i] & 0xff) << 24)
+ ((data[i + 1] & 0xff) << 16)
+ ((data[i + 2] & 0xff) << 8)
+ (data[i + 3] & 0xff);
i += 4;
if(frameLength > 0)
{
if(i+frameLength-1 >= dataLength) return;
Frame frame = new Frame(frameID);
frame.frameData = new byte[frameLength];
System.arraycopy(data, i, frame.frameData, 0, frameLength);
frames.add(frame);
frameID++;
i += frameLength;
}
}
}
@Override
public void surfaceCreated(SurfaceHolder holder) 
{
Log.d("DecodeActivity", "in surfaceCreated");
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 
{
Log.d("DecodeActivity", "in surfaceChanged");
if (mPlayer == null) 
{
Toast.makeText(getApplicationContext(), "in surfaceChanged. creating playerthread", Toast.LENGTH_SHORT).show();
mPlayer = new PlayerThread(holder.getSurface());
mPlayer.start();
}
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) 
{
if (mPlayer != null) 
{
mPlayer.interrupt();    
}
}
private class PlayerThread extends Thread 
{
//private MediaExtractor extractor;
private MediaCodec decoder;
private Surface surface;
public PlayerThread(Surface surface) 
{
this.surface = surface;
}
@Override
public void run() 
{
handler.post(new Runnable()
{
@Override
public void run() 
{
decoder = MediaCodec.createDecoderByType("video/avc");
MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240);
byte[] header_sps  = { 0x00, 0x00, 0x00, 0x01, 0x67, 0x42, (byte)0x80, 0x0C, (byte)0xE4, 0x40, (byte)0xA0, (byte)0xFD, 0x00, (byte)0xDA, 0x14, 0x26, (byte)0xA0 };
byte[] header_pps = {0x00, 0x00, 0x00, 0x01, 0x68, (byte)0xCE, 0x38, (byte)0x80 };
mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps));
mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps));
decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */);
if (decoder == null) 
{
Log.e("DecodeActivity", "Can't find video info!");
return;
}
decoder.start();
Log.d("DecodeActivity", "decoder.start() called");
ByteBuffer[] inputBuffers = decoder.getInputBuffers();
ByteBuffer[] outputBuffers = decoder.getOutputBuffers();

long startMs = System.currentTimeMillis();
int i = 0;
while(!Thread.interrupted())
{
if(i >= frames.size()) 
break;
byte[] data = new byte[frames.get(i).frameData.length];
System.arraycopy(frames.get(i).frameData, 0, data, 0, frames.get(i).frameData.length);
Log.d("DecodeActivity", "i = " + i + " dataLength = " + frames.get(i).frameData.length);
int inIndex = 0; 
while ((inIndex = decoder.dequeueInputBuffer(1)) < 0)
;
if (inIndex >= 0) 
{
ByteBuffer buffer = inputBuffers[inIndex];
buffer.clear();
int sampleSize = data.length;
if (sampleSize < 0) 
{
Log.d("DecodeActivity", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
break;
} 
else 
{
Log.d("DecodeActivity", "sample size: " + sampleSize);
buffer = ByteBuffer.allocate(data.length);
buffer.put(data);
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);
}

BufferInfo info = new BufferInfo();
int outIndex = decoder.dequeueOutputBuffer(info, 100000);
switch (outIndex) 
{
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
Log.d("DecodeActivity", "INFO_OUTPUT_BUFFERS_CHANGED");
outputBuffers = decoder.getOutputBuffers();
break;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
Log.d("DecodeActivity", "New format " + decoder.getOutputFormat());
break;
case MediaCodec.INFO_TRY_AGAIN_LATER:
Log.d("DecodeActivity", "dequeueOutputBuffer timed out!");
try {
sleep(100);
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
break;
default:
ByteBuffer outbuffer = outputBuffers[outIndex];
Log.d("DecodeActivity", "We can't use this buffer but render it due to the API limit, " + outbuffer);
/*while (info.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) 
{
try 
{
sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
break;
}
}*/
decoder.releaseOutputBuffer(outIndex, true);
break;
}
i++;
// All decoded frames have been rendered, we can stop playing now
/*if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) 
{
Log.d("DecodeActivity", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
break;
}*/
}
}
decoder.stop();
decoder.release();
}
});         
}
}
}

这部分看起来不对:

ByteBuffer buffer = inputBuffers[inIndex];
[...]
buffer = ByteBuffer.allocate(data.length);
buffer.put(data);
decoder.queueInputBuffer(inIndex, 0, sampleSize, 0, 0);

你得到了输入缓冲区,然后忽略它,而选择你自己分配的缓冲区。将ByteBuffer.allocate()调用替换为buffer.clear()

您所做的与DecodeEditEncodeTest中的检查过程非常相似,只是后者只是将整个内容保存在内存中,而不是将其序列化到磁盘。看看checkVideoData()

您可能希望采用测试的方法,将区块标志与数据串行化。如果你这样做了,你就不需要特别处理SPS/PPS头了——只需像其他块一样将其写入流中(它恰好设置了CODEC_CONFIG标志)。串行化时间戳也是一个好主意,除非保证输入视频具有已知的、不变化的帧速率。

最新更新