Screen capture including an AVCaptureVideoPreviewLayer with overlay buttons

> I am using a screen recorder to capture the screen. It works perfectly when an ordinary view fills the iPhone screen. But when an AVCaptureVideoPreviewLayer with overlay buttons is shown, the saved screen-capture video contains the overlay buttons but not the AVCaptureVideoPreviewLayer content. I used this tutorial to add the overlay. How can I fix this?

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    @autoreleasepool {
        if ([connection isVideoOrientationSupported])
            [connection setVideoOrientation:[self cameraOrientation]];
        CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
        /* Lock the image buffer */
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        /* Get information about the image */
        uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
        size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
        size_t width = CVPixelBufferGetWidth(imageBuffer);
        size_t height = CVPixelBufferGetHeight(imageBuffer);
        /* Create a CGImageRef from the CVImageBufferRef (assumes 32BGRA frames) */
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
        CGImageRef newImage = CGBitmapContextCreateImage(newContext);
        /* Release the buffer lock, context, and color space */
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        CGContextRelease(newContext);
        CGColorSpaceRelease(colorSpace);
        UIImage *image = [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight]; // NOTE: this orientation-corrected copy is never used
        image1 = [UIImage imageWithCGImage:newImage]; // keep the latest camera frame in the image1 ivar
        /* Release the CGImageRef */
        CGImageRelease(newImage);

        // dispatch_async avoids stalling the capture queue on the main thread
        dispatch_async(dispatch_get_main_queue(), ^{
            [self.imageView setImage:image1];
        });
    }
}
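
For the BGRA bitmap context above to match the buffer layout, the video data output has to be configured to deliver 32BGRA frames. A minimal sketch of the capture setup this delegate assumes (captureSession and the queue name are illustrative, not shown in the original post):

AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
// Request BGRA frames so the CGBitmapContextCreate call above matches the pixel layout
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
dispatch_queue_t frameQueue = dispatch_queue_create("camera.frame.queue", DISPATCH_QUEUE_SERIAL);
[videoOutput setSampleBufferDelegate:self queue:frameQueue];
if ([captureSession canAddOutput:videoOutput])
    [captureSession addOutput:videoOutput];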

writeSample: is driven by an NSTimer.
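
A minimal sketch of how that timer might be set up (the 30 fps interval and the writeTimer property are assumptions, not shown in the original post):

self.writeTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 30.0 // assumed frame rate
                                                   target:self
                                                 selector:@selector(writeSample:)
                                                 userInfo:nil
                                                  repeats:YES];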

- (void)writeSample:(NSTimer *)_timer {
    if (assetWriterInput.readyForMoreMediaData) {
        @autoreleasepool {
            CVReturn cvErr = kCVReturnSuccess;
            // Render baseViewOne into a screenshot image
            UIGraphicsBeginImageContext(baseViewOne.frame.size);
            [[baseViewOne layer] renderInContext:UIGraphicsGetCurrentContext()];
            screenshota = UIGraphicsGetImageFromCurrentImageContext();
            UIGraphicsEndImageContext();

            CGImageRef image = (CGImageRef)[screenshota CGImage];
            // Wrap the screenshot's bitmap bytes in a pixel buffer
            CVPixelBufferRef pixelBuffer = NULL;
            CFDataRef imageData = CGDataProviderCopyData(CGImageGetDataProvider(image));
            cvErr = CVPixelBufferCreateWithBytes(kCFAllocatorDefault,
                                                 baseViewOne.frame.size.width,
                                                 baseViewOne.frame.size.height,
                                                 kCVPixelFormatType_32BGRA,
                                                 (void *)CFDataGetBytePtr(imageData),
                                                 CGImageGetBytesPerRow(image),
                                                 NULL, NULL, NULL,
                                                 &pixelBuffer);
            if (cvErr != kCVReturnSuccess) {
                CFRelease(imageData);
                return;
            }
            // Calculate the presentation time from the wall clock
            CFAbsoluteTime thisFrameWallClockTime = CFAbsoluteTimeGetCurrent();
            elapsedTime = thisFrameWallClockTime - (firstFrameWallClockTime + pausedFrameTime);
            CMTime presentationTime = CMTimeMake(elapsedTime * TIME_SCALE, TIME_SCALE);
            BOOL appended = [assetWriterPixelBufferAdaptor appendPixelBuffer:pixelBuffer
                                                        withPresentationTime:presentationTime];
            // Release the pixel buffer and its backing data whether or not the
            // append succeeded; otherwise every failed frame leaks
            CVPixelBufferRelease(pixelBuffer);
            CFRelease(imageData);
            if (!appended) {
                [self stopRecording];
            }
        }
    }
}
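
The underlying problem is that renderInContext: cannot draw an AVCaptureVideoPreviewLayer: the camera preview is composited outside of Core Animation's software rendering path, so the snapshot contains the buttons but an empty preview area. Since captureOutput: above already converts every camera frame into image1, one workaround is to draw that frame first and then render only the overlay on top of it. A minimal sketch, assuming a hypothetical overlayView that contains the buttons but not the preview layer:

// overlayView is a hypothetical container holding only the buttons
UIGraphicsBeginImageContextWithOptions(baseViewOne.bounds.size, YES, 1.0);
[image1 drawInRect:baseViewOne.bounds];                              // latest camera frame
[[overlayView layer] renderInContext:UIGraphicsGetCurrentContext()]; // overlay controls only
UIImage *composited = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext();

The composited image can then be wrapped in a pixel buffer exactly as writeSample: does. Note that image1 is written on the capture queue and read here on the main thread, so some synchronization (or copying the frame under a lock) may be needed.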

Latest update