How to merge audio into a video using MediaMuxer

I want to merge an audio file into a video using MediaMuxer, but the app crashes every time I run it. I have researched this a lot, and as far as I can tell the code is correct. The logcat shows it counting frames, but after that it just stops; it never even shows the Toast I put in the code. Does anyone know what is wrong, or is there another way to merge audio into a video?

Here is my code:

File file = new File(Environment.getExternalStorageDirectory() + File.separator + "final2.mp4");
file.createNewFile();
output = file.getAbsolutePath();
MediaExtractor videoExtractor = new MediaExtractor();
videoExtractor.setDataSource(videopath);
MediaExtractor audioExtractor = new MediaExtractor();
audioExtractor.setDataSource(audiopath);
Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount() );
Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount() );
MediaMuxer muxer = new MediaMuxer(output, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
videoExtractor.selectTrack(0);
MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
int videoTrack = muxer.addTrack(videoFormat);
audioExtractor.selectTrack(0);
MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
int audioTrack = muxer.addTrack(audioFormat);
Log.d(TAG, "Video Format " + videoFormat.toString() );
Log.d(TAG, "Audio Format " + audioFormat.toString() );
boolean sawEOS = false;
int frameCount = 0;
int offset = 100;
int sampleSize = 256 * 1024;
ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
muxer.start();
while (!sawEOS)
{
videoBufferInfo.offset = offset;
videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);

if (videoBufferInfo.size < 0) // readSampleData() returns -1 at end of stream
{
Log.d(TAG, "saw input EOS.");
sawEOS = true;
videoBufferInfo.size = 0;
}
else
{
videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
videoBufferInfo.flags = videoExtractor.getSampleFlags(); // use the real sample flags instead of marking every frame as a sync frame
muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
videoExtractor.advance();

frameCount++;
Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);
}
}
Toast.makeText(getApplicationContext() , "frame:" + frameCount , Toast.LENGTH_SHORT).show();

boolean sawEOS2 = false;
int frameCount2 =0;
while (!sawEOS2)
{
frameCount2++;
audioBufferInfo.offset = offset;
audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);
if (audioBufferInfo.size < 0) // readSampleData() returns -1 at end of stream
{
Log.d(TAG, "saw input EOS.");
sawEOS2 = true;
audioBufferInfo.size = 0;
}
else
{
audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
audioBufferInfo.flags = audioExtractor.getSampleFlags();
muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
audioExtractor.advance();

Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);
}
}
Toast.makeText(getApplicationContext() , "frame:" + frameCount2 , Toast.LENGTH_SHORT).show();
muxer.stop();
muxer.release();
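
As an aside, the code above hardcodes track 0 in both files. If a source file carries more than one track, a helper along the lines of this sketch (it only uses the standard MediaExtractor/MediaFormat API; findTrack is just an illustrative name, not part of my code) would pick a track by its MIME prefix instead:

    // Sketch: returns the index of the first track whose MIME type starts
    // with mimePrefix ("video/" or "audio/"), or -1 if none is found.
    private static int findTrack(MediaExtractor extractor, String mimePrefix) {
        for (int i = 0; i < extractor.getTrackCount(); i++) {
            String mime = extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
            if (mime != null && mime.startsWith(mimePrefix)) {
                return i;
            }
        }
        return -1;
    }

With it, videoExtractor.selectTrack(findTrack(videoExtractor, "video/")) would replace the hardcoded videoExtractor.selectTrack(0).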

Here is my logcat:

09-03 13:31:56.350 31898 31898 E   ActivityThread                               Performing stop of activity that is not resumed: {com.mycompany.myapp/com.mycompany.myapp.MainActivity}
09-03 13:31:56.350 31898 31898 E   ActivityThread                               java.lang.RuntimeException: Performing stop of activity that is not resumed: {com.mycompany.myapp/com.mycompany.myapp.MainActivity}
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.app.ActivityThread.performStopActivityInner(ActivityThread.java:3552)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.app.ActivityThread.handleStopActivity(ActivityThread.java:3633)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.app.ActivityThread.access$1300(ActivityThread.java:164)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.app.ActivityThread$H.handleMessage(ActivityThread.java:1491)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.os.Handler.dispatchMessage(Handler.java:102)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.os.Looper.loop(Looper.java:160)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at android.app.ActivityThread.main(ActivityThread.java:5541)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at java.lang.reflect.Method.invoke(Native Method)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at java.lang.reflect.Method.invoke(Method.java:372)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:964)
09-03 13:31:56.350 31898 31898 E   ActivityThread                               at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:759)
09-03 13:40:05.646 2792 2792 E     WVMExtractor com.mycompany.myapp             Failed to open libwvm.so: dlopen failed: library "libwvm.so" not found

Latest update: I moved the merging code into an AsyncTask:
public class Merge extends AsyncTask<String, String, String>
{
@Override
protected void onPostExecute(String result)
{
Log.i("result", result);
super.onPostExecute(result);
}

@Override
protected String doInBackground(String[] p1)
{
String au = p1[0];
String vi = p1[1];
Log.i("au", au);
Log.i("vi", vi);
//Log.d(TAG, "Video Extractor Track Count " + videoExtractor.getTrackCount() );
//Log.d(TAG, "Audio Extractor Track Count " + audioExtractor.getTrackCount() );
try
{
File file = new File(Environment.getExternalStorageDirectory() + File.separator + "m24.mp4");
file.createNewFile();
output = file.getAbsolutePath();
MediaExtractor videoExtractor = new MediaExtractor();
videoExtractor.setDataSource(vi);
MediaExtractor audioExtractor = new MediaExtractor();
audioExtractor.setDataSource(au);
MediaMuxer muxer = new MediaMuxer(output, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
videoExtractor.selectTrack(0);
MediaFormat videoFormat = videoExtractor.getTrackFormat(0);
int videoTrack = muxer.addTrack(videoFormat);
audioExtractor.selectTrack(0);
MediaFormat audioFormat = audioExtractor.getTrackFormat(0);
int audioTrack = muxer.addTrack(audioFormat);
Log.d(TAG, "Video Format " + videoFormat.toString() );
Log.d(TAG, "Audio Format " + audioFormat.toString() );
boolean sawEOS = false;
int frameCount = 0;
int offset = 100;
int sampleSize = 256 * 1024;
ByteBuffer videoBuf = ByteBuffer.allocate(sampleSize);
ByteBuffer audioBuf = ByteBuffer.allocate(sampleSize);
MediaCodec.BufferInfo videoBufferInfo = new MediaCodec.BufferInfo();
MediaCodec.BufferInfo audioBufferInfo = new MediaCodec.BufferInfo();

videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
audioExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
muxer.start();
while (!sawEOS)
{
videoBufferInfo.offset = offset;
videoBufferInfo.size = videoExtractor.readSampleData(videoBuf, offset);

if (videoBufferInfo.size < 0) // readSampleData() returns -1 at end of stream
{
Log.d("GGv", "saw input EOS.");
sawEOS = true;
videoBufferInfo.size = 0;
}
else
{
videoBufferInfo.presentationTimeUs = videoExtractor.getSampleTime();
videoBufferInfo.flags = videoExtractor.getSampleFlags(); // use the real sample flags instead of marking every frame as a sync frame
muxer.writeSampleData(videoTrack, videoBuf, videoBufferInfo);
videoExtractor.advance();

frameCount++;
//Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
//Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);
}
}
//Toast.makeText(getApplicationContext() , "frame:" + frameCount , Toast.LENGTH_SHORT).show();

boolean sawEOS2 = false;
int frameCount2 =0;
while (!sawEOS2)
{
frameCount2++;
audioBufferInfo.offset = offset;
audioBufferInfo.size = audioExtractor.readSampleData(audioBuf, offset);
if (audioBufferInfo.size < 0) // readSampleData() returns -1 at end of stream
{
Log.d("GGa", "saw input EOS.");
sawEOS2 = true;
audioBufferInfo.size = 0;
}
else
{
audioBufferInfo.presentationTimeUs = audioExtractor.getSampleTime();
audioBufferInfo.flags = audioExtractor.getSampleFlags();
muxer.writeSampleData(audioTrack, audioBuf, audioBufferInfo);
audioExtractor.advance();

//Log.d(TAG, "Frame (" + frameCount + ") Video PresentationTimeUs:" + videoBufferInfo.presentationTimeUs +" Flags:" + videoBufferInfo.flags +" Size(KB) " + videoBufferInfo.size / 1024);
//Log.d(TAG, "Frame (" + frameCount + ") Audio PresentationTimeUs:" + audioBufferInfo.presentationTimeUs +" Flags:" + audioBufferInfo.flags +" Size(KB) " + audioBufferInfo.size / 1024);
}
}
//Toast.makeText(getApplicationContext() , "frame:" + frameCount2 , Toast.LENGTH_SHORT).show();
muxer.stop();
muxer.release();
String su = "Su";
return su;
}
catch (IOException e)
{
Log.e("Merge", "merging failed", e); // don't swallow the exception silently
}
return null;
}
}

Merge m = new Merge();
m.execute(audiopath, videopath);
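
Note that Toast must be shown on the UI thread, while doInBackground runs on a worker thread, so the frame-count Toast cannot be shown there. A sketch of how onPostExecute (which does run on the UI thread) could show it instead; the "frames:" message and returning the count are illustrative, not what my code currently does:

    @Override
    protected void onPostExecute(String result) {
        super.onPostExecute(result);
        // onPostExecute runs on the UI thread, so a Toast is safe here
        // (this assumes Merge is an inner class of the Activity, so the
        // Activity's getApplicationContext() is in scope).
        if (result != null) {
            Toast.makeText(getApplicationContext(), "frames: " + result, Toast.LENGTH_SHORT).show();
        }
    }

For that to report the count, doInBackground would return String.valueOf(frameCount) instead of "Su".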
