Mixing an audio file and a video file within a certain time range



Hello, I hope you're having a great day. I'm using this method to mix a video file together with an audio file:

public String mix(String videoFile, String audioFile, String targetFile) {
    // Parse the video file
    Movie video;
    try {
        video = MovieCreator.build(videoFile);
    } catch (RuntimeException e) {
        e.printStackTrace();
        return null;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    // Parse the audio file
    Movie audio;
    try {
        audio = MovieCreator.build(audioFile);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } catch (NullPointerException e) {
        e.printStackTrace();
        return null;
    }
    // Take the last track of the audio movie and add it to the video movie
    int size = audio.getTracks().size();
    com.googlecode.mp4parser.authoring.Track audioTrack = audio.getTracks().get(size - 1);
    video.addTrack(audioTrack);
    // Build the merged container and write it to the target file
    Container out = new DefaultMp4Builder().build(video);
    try {
        RandomAccessFile ram = new RandomAccessFile(new File(targetFile), "rw");
        FileChannel fc = ram.getChannel();
        out.writeContainer(fc);
        ram.close();
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    return targetFile;
}
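
For reference, this is a minimal sketch of how the method above could be called and which imports it depends on, assuming the older com.googlecode.mp4parser (isoparser 1.x) artifact; the file paths are placeholders, not from the original code:

import java.io.File;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;

import com.coremedia.iso.boxes.Container;
import com.googlecode.mp4parser.authoring.Movie;
import com.googlecode.mp4parser.authoring.builder.DefaultMp4Builder;
import com.googlecode.mp4parser.authoring.container.mp4.MovieCreator;

// ...
// Placeholder paths for illustration only
String result = mix("/sdcard/input_video.mp4", "/sdcard/input_audio.m4a", "/sdcard/mixed.mp4");
if (result == null) {
    // mixing failed; the method logged a stack trace and returned null
}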

But I would like to know how to mix in only the audio from a specific start time to a specific end time? Thanks a lot.

Found a solution

public String mix(String videoFile, String audioFile, String targetFile, long start, long end) {
    // Parse the video file
    Movie video;
    try {
        video = MovieCreator.build(videoFile);
    } catch (RuntimeException e) {
        e.printStackTrace();
        return null;
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    // Parse the audio file
    Movie audio;
    try {
        audio = MovieCreator.build(audioFile);
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    } catch (NullPointerException e) {
        e.printStackTrace();
        return null;
    }
    int size = audio.getTracks().size();
    com.googlecode.mp4parser.authoring.Track audioTrack = audio.getTracks().get(size - 1);

    // Convert the start/end times (in seconds) into sample indexes by walking
    // the per-sample durations of the audio track.
    long currentSample = 0;
    double currentTime = 0;
    double lastTime = -1;
    long startSample1 = -1;
    long endSample1 = -1;
    for (int i = 0; i < audioTrack.getSampleDurations().length; i++) {
        long delta = audioTrack.getSampleDurations()[i];

        if (currentTime > lastTime && currentTime <= start) {
            // current sample is still before the new start time
            startSample1 = currentSample;
        }
        if (currentTime > lastTime && currentTime <= end) {
            // current sample is after the new start time and still before the new end time
            endSample1 = currentSample;
        }
        lastTime = currentTime;
        currentTime += (double) delta / (double) audioTrack.getTrackMetaData().getTimescale();
        currentSample++;
    }
    // CroppedTrack expects sample indexes (not times), so the computed indexes are passed here
    try {
        video.addTrack(new AppendTrack(new CroppedTrack(audioTrack, startSample1, endSample1)));
    } catch (IOException e) {
        e.printStackTrace();
    }
    // Build the merged container and write it to the target file
    Container out = new DefaultMp4Builder().build(video);
    try {
        RandomAccessFile ram = new RandomAccessFile(new File(targetFile), "rw");
        FileChannel fc = ram.getChannel();
        out.writeContainer(fc);
        ram.close();
    } catch (IOException e) {
        e.printStackTrace();
        return null;
    }
    return targetFile;
}
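
For reference, a minimal usage sketch of the range-based overload. The start and end parameters are interpreted as seconds, since they are compared against currentTime, which is accumulated in seconds; the extra imports come from the same mp4parser artifact, and the file paths and values below are placeholder assumptions:

import com.googlecode.mp4parser.authoring.tracks.AppendTrack;
import com.googlecode.mp4parser.authoring.tracks.CroppedTrack;

// ...
// Mix only the audio between second 5 and second 20 into the video (placeholder paths)
String result = mix("/sdcard/input_video.mp4", "/sdcard/input_audio.m4a", "/sdcard/mixed.mp4", 5, 20);
if (result == null) {
    // mixing failed; the method logged a stack trace and returned null
}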
