Square-cropping a video and fixing its orientation in iOS



> I am capturing video with UIImagePickerController, and I can crop the video using the following code:

AVAsset *asset = [AVAsset assetWithURL:url];
//create an avassetrack with our asset
AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
//create a video composition and preset some settings
AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
videoComposition.frameDuration = CMTimeMake(1, 30);
//here we are setting its render size to its height x height (Square)
videoComposition.renderSize = CGSizeMake(clipVideoTrack.naturalSize.height, clipVideoTrack.naturalSize.height);
//create a video instruction
AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
instruction.timeRange = CMTimeRangeMake(kCMTimeZero, CMTimeMakeWithSeconds(60, 30));
AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
//Here we shift the viewing square up to the TOP of the video so we only see the top
CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -20);
//Use this code if you want the viewing square to be in the middle of the video
//CGAffineTransform t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height, -(clipVideoTrack.naturalSize.width - clipVideoTrack.naturalSize.height) /2 );
//Make sure the square is portrait
CGAffineTransform t2 = CGAffineTransformRotate(t1, M_PI_2);
CGAffineTransform finalTransform = t2;
[transformer setTransform:finalTransform atTime:kCMTimeZero];
//add the transformer layer instructions, then add to video composition
instruction.layerInstructions = [NSArray arrayWithObject:transformer];
videoComposition.instructions = [NSArray arrayWithObject: instruction];
//Create an Export Path to store the cropped video
NSString *outputPath = [NSString stringWithFormat:@"%@%@", NSTemporaryDirectory(), @"video.mp4"];
NSURL *exportUrl = [NSURL fileURLWithPath:outputPath];
//Remove any previous videos at that path
[[NSFileManager defaultManager] removeItemAtURL:exportUrl error:nil];
//Export
AVAssetExportSession *exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetLowQuality];
exporter.videoComposition = videoComposition;
exporter.outputURL = exportUrl;
exporter.outputFileType = AVFileTypeMPEG4;
[exporter exportAsynchronouslyWithCompletionHandler:^
 {
     dispatch_async(dispatch_get_main_queue(), ^{
         //Call when finished
         [self exportDidFinish:exporter];
     });
 }];

But I don't know how to solve the orientation problem. Like the Instagram and Vine apps, even if I capture the video in landscape mode it should come out in portrait mode, and the video needs to be cropped to a square. Please give me a solution... I am struggling with this problem.

I believe the source code comes from this link (project code included):

http://www.one-dreamer.com/cropping-video-square-like-vine-instagram-xcode/

You first need to determine the real video orientation:

- (UIImageOrientation)getVideoOrientationFromAsset:(AVAsset *)asset
{
    AVAssetTrack *videoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    CGSize size = [videoTrack naturalSize];
    CGAffineTransform txf = [videoTrack preferredTransform];
    if (size.width == txf.tx && size.height == txf.ty)
        return UIImageOrientationLeft; //return UIInterfaceOrientationLandscapeLeft;
    else if (txf.tx == 0 && txf.ty == 0)
        return UIImageOrientationRight; //return UIInterfaceOrientationLandscapeRight;
    else if (txf.tx == 0 && txf.ty == size.width)
        return UIImageOrientationDown; //return UIInterfaceOrientationPortraitUpsideDown;
    else
        return UIImageOrientationUp;  //return UIInterfaceOrientationPortrait;
}

I wrote this function so that it returns the correct orientation as if the video were an image.
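
For example, a quick check of a freshly recorded clip might look like the sketch below; recordedVideoURL is a placeholder for wherever your movie file lives, and the call assumes the method sits in the same class:

AVAsset *recordedAsset = [AVAsset assetWithURL:recordedVideoURL]; // recordedVideoURL: hypothetical NSURL to the captured movie
UIImageOrientation orientation = [self getVideoOrientationFromAsset:recordedAsset];
// per the comments inside the function, UIImageOrientationUp maps to a portrait capture,
// UIImageOrientationRight to landscape right, and so on
NSLog(@"detected video orientation: %ld", (long)orientation);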

I then modified the cropping function so that it fixes the orientation correctly and supports any crop region, not just a square, like this:

// apply the crop to the passed video asset (pass nil as outputUrl to skip saving to disk); returns the export session object
- (AVAssetExportSession*)applyCropToVideoWithAsset:(AVAsset*)asset AtRect:(CGRect)cropRect OnTimeRange:(CMTimeRange)cropTimeRange ExportToUrl:(NSURL*)outputUrl ExistingExportSession:(AVAssetExportSession*)exporter WithCompletion:(void(^)(BOOL success, NSError* error, NSURL* videoUrl))completion
{
    //create an avassetrack with our asset
    AVAssetTrack *clipVideoTrack = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
    //create a video composition and preset some settings
    AVMutableVideoComposition* videoComposition = [AVMutableVideoComposition videoComposition];
    videoComposition.frameDuration = CMTimeMake(1, 30);
    CGFloat cropOffX = cropRect.origin.x;
    CGFloat cropOffY = cropRect.origin.y;
    CGFloat cropWidth = cropRect.size.width;
    CGFloat cropHeight = cropRect.size.height;
    videoComposition.renderSize = CGSizeMake(cropWidth, cropHeight);
    //create a video instruction
    AVMutableVideoCompositionInstruction *instruction = [AVMutableVideoCompositionInstruction videoCompositionInstruction];
    instruction.timeRange = cropTimeRange;
    AVMutableVideoCompositionLayerInstruction* transformer = [AVMutableVideoCompositionLayerInstruction videoCompositionLayerInstructionWithAssetTrack:clipVideoTrack];
    UIImageOrientation videoOrientation = [self getVideoOrientationFromAsset:asset];
    CGAffineTransform t1 = CGAffineTransformIdentity;
    CGAffineTransform t2 = CGAffineTransformIdentity;
    switch (videoOrientation) {
        case UIImageOrientationUp:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.height - cropOffX, 0 - cropOffY );
            t2 = CGAffineTransformRotate(t1, M_PI_2 );
            break;
        case UIImageOrientationDown:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, clipVideoTrack.naturalSize.width - cropOffY ); // note: naturalSize.width is the real height when the video is upside down
            t2 = CGAffineTransformRotate(t1, - M_PI_2 );
            break;
        case UIImageOrientationRight:
            t1 = CGAffineTransformMakeTranslation(0 - cropOffX, 0 - cropOffY );
            t2 = CGAffineTransformRotate(t1, 0 );
            break;
        case UIImageOrientationLeft:
            t1 = CGAffineTransformMakeTranslation(clipVideoTrack.naturalSize.width - cropOffX, clipVideoTrack.naturalSize.height - cropOffY );
            t2 = CGAffineTransformRotate(t1, M_PI  );
            break;
        default:
            NSLog(@"no supported orientation has been found in this video");
            break;
    }
    CGAffineTransform finalTransform = t2;
    [transformer setTransform:finalTransform atTime:kCMTimeZero];
    //add the transformer layer instructions, then add to video composition
    instruction.layerInstructions = [NSArray arrayWithObject:transformer];
    videoComposition.instructions = [NSArray arrayWithObject: instruction];
    //Remove any previous videos at that path
    [[NSFileManager defaultManager] removeItemAtURL:outputUrl error:nil];
    if (!exporter){
        exporter = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality] ;
    }
    // assign all instructions for the video processing (in this case, the transformation for cropping the video)
    exporter.videoComposition = videoComposition;
    //exporter.outputFileType = AVFileTypeQuickTimeMovie; // note: the output file type must be set (here or by the caller) before exporting
    if (outputUrl){
        exporter.outputURL = outputUrl;
        [exporter exportAsynchronouslyWithCompletionHandler:^{
             switch ([exporter status]) {
                 case AVAssetExportSessionStatusFailed:
                     NSLog(@"crop Export failed: %@", [[exporter error] localizedDescription]);
                     if (completion){
                         dispatch_async(dispatch_get_main_queue(), ^{
                             completion(NO,[exporter error],nil);
                         });
                         return;
                     }
                     break;
                 case AVAssetExportSessionStatusCancelled:
                     NSLog(@"crop Export canceled");
                     if (completion){
                         dispatch_async(dispatch_get_main_queue(), ^{
                             completion(NO,nil,nil);
                         });
                         return;
                     }
                     break;
                 default:
                     break;
            }
            if (completion){
                dispatch_async(dispatch_get_main_queue(), ^{
                    completion(YES,nil,outputUrl);
                });
            }
        }];
    }
    return exporter;
}
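
As a usage note (not part of the original answer), a minimal call site might look like the sketch below, assuming it lives in the same class as the two methods above and that videoURL points at the recorded movie; the rect, preset, and output path are illustrative.

AVAsset *asset = [AVAsset assetWithURL:videoURL]; // videoURL: hypothetical URL of the recorded movie
AVAssetTrack *track = [[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0];
CGFloat side = MIN(track.naturalSize.width, track.naturalSize.height);
CGRect squareRect = CGRectMake(0, 0, side, side); // square crop anchored at the origin of the orientation-fixed frame

AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
session.outputFileType = AVFileTypeQuickTimeMovie; // the crop method leaves the output file type to the caller

NSURL *outputUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"cropped.mov"]];
[self applyCropToVideoWithAsset:asset
                         AtRect:squareRect
                    OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                    ExportToUrl:outputUrl
          ExistingExportSession:session
                 WithCompletion:^(BOOL success, NSError *error, NSURL *videoUrl) {
    NSLog(@"crop finished: success=%d url=%@ error=%@", success, videoUrl, error);
}];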

Tested with both the rear and front cameras, in all recorded video orientations (up, down, landscape right, landscape left). I tested on an iPhone 5S (iOS 8.1) and an iPhone 6 Plus (iOS 8.1).

Hope this helps.

Here is my code for creating a Vine-like (square) video from a video on disk. It is written in Swift:

static let MaxDuration: CMTimeValue = 12
class func compressVideoAsset(_ asset: AVAsset, output: URL, completion: @escaping (_ data: Data?) -> Void)
{
    let session = AVAssetExportSession(asset: asset, presetName: AVAssetExportPresetMediumQuality)!
    session.videoComposition = self.squareVideoCompositionForAsset(asset)
    session.outputURL = output
    session.outputFileType = AVFileTypeMPEG4
    session.shouldOptimizeForNetworkUse = true
    session.canPerformMultiplePassesOverSourceMediaData = true
    // cap the export at MaxDuration seconds, expressed in a 30 fps timescale
    let duration = CMTimeValue(CGFloat(asset.duration.value) / CGFloat(asset.duration.timescale) * 30)
    session.timeRange = CMTimeRange(start: kCMTimeZero, duration: CMTime(value: min(duration, VideoCompressor.MaxDuration * 30), timescale: 30))
    session.exportAsynchronously(completionHandler: { () -> Void in
        let data = try? Data(contentsOf: output)
        DispatchQueue.main.async(execute: { () -> Void in
            completion(data)
        })
    })
}
private class func squareVideoCompositionForAsset(_ asset: AVAsset) -> AVVideoComposition
{
    let track = asset.tracks(withMediaType: AVMediaTypeVideo)[0]
    // the square side is the shorter dimension of the source track
    let length = min(track.naturalSize.width, track.naturalSize.height)
    var transform = track.preferredTransform
    let size = track.naturalSize
    let scale: CGFloat = (transform.a == -1 && transform.b == 0 && transform.c == 0 && transform.d == -1) ? -1 : 1 // check for inversion (180° rotation)
    // shift so that the centered square region of the source fills the square render size
    transform = transform.translatedBy(x: scale * -(size.width - length) / 2, y: scale * -(size.height - length) / 2)
    let transformer = AVMutableVideoCompositionLayerInstruction(assetTrack: track)
    transformer.setTransform(transform, at: kCMTimeZero)
    let instruction = AVMutableVideoCompositionInstruction()
    instruction.timeRange = CMTimeRange(start: kCMTimeZero, duration: kCMTimePositiveInfinity)
    instruction.layerInstructions = [transformer]
    let composition = AVMutableVideoComposition()
    composition.frameDuration = CMTime(value: 1, timescale: 30)
    composition.renderSize = CGSize(width: length, height: length)
    composition.instructions = [instruction]
    return composition
}

I know this question is old, but some people may still wonder why some videos from the camera roll end up zoomed in after cropping. I ran into this problem and realized that the cropRect I was using as the frame was not scaled for the video's different aspect ratio. To fix it, I simply added the code below, which crops the very top of the video into a square. If you want to change the position, just change the y value, but make sure to scale it according to the video. Luca Iaco provided some great code to get started with. I appreciate it!

CGSize videoSize = [[[asset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0] naturalSize];
// scale the 320-point crop square up to the video's pixel dimensions,
// using the shorter side so the square always fits
float scaleFactor;
if (videoSize.width > videoSize.height) {
    scaleFactor = videoSize.height/320;
}
else if (videoSize.width == videoSize.height){
    scaleFactor = videoSize.height/320;
}
else{
    scaleFactor = videoSize.width/320;
}
CGFloat cropOffX = 0;
CGFloat cropOffY = 0;
CGFloat cropWidth = 320 * scaleFactor;
CGFloat cropHeight = 320 * scaleFactor;
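
To tie this back to the earlier answer, these values can be packed into a CGRect and passed to the applyCropToVideoWithAsset:... method. This is a hedged sketch: asset is the same asset used just above, while the output path and export session setup are illustrative assumptions.

CGRect scaledCropRect = CGRectMake(cropOffX, cropOffY, cropWidth, cropHeight);
NSURL *croppedUrl = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:@"square.mov"]];
AVAssetExportSession *session = [[AVAssetExportSession alloc] initWithAsset:asset presetName:AVAssetExportPresetHighestQuality];
session.outputFileType = AVFileTypeQuickTimeMovie; // the crop method expects the caller to choose the file type

[self applyCropToVideoWithAsset:asset
                         AtRect:scaledCropRect
                    OnTimeRange:CMTimeRangeMake(kCMTimeZero, asset.duration)
                    ExportToUrl:croppedUrl
          ExistingExportSession:session
                 WithCompletion:^(BOOL success, NSError *error, NSURL *videoUrl) {
    NSLog(@"square crop finished: success=%d error=%@", success, error);
}];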