I'm trying to create a custom patch for Quartz Composer that works like the built-in Video Input patch, but with a selectable capture device on an input port. The patch below works for me as far as I can tell, but when I connect a DV device (a Canopus ADVC-110) and select it, the buffer's ColorSpace is (null) and I get an exception. It works fine with the FaceTime HD camera, which is of the video media type. I must be missing something, but I just can't see it.

The captureOutput delegate method fires over and over, as if new frames were coming in, and the capture appears to start just fine. What am I missing?
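A guard along these lines in execute:, before the image provider is created, would at least avoid passing a NULL colorspace (an untested sketch; falling back to the plug-in context's colorspace is my guess, not documented behavior for DV/muxed devices):

// Untested sketch: DV/muxed frames seem to arrive with no colorspace
// attached, so borrow the plug-in context's colorspace as a fallback.
CGColorSpaceRef colorSpace = CVImageBufferGetColorSpace(imageBuffer);
if (colorSpace == NULL)
    colorSpace = [context colorSpace]; // borrowed reference, nothing to release
// ...then pass `colorSpace` to outputImageProviderFromBufferWithPixelFormat:...
// instead of calling CVImageBufferGetColorSpace(imageBuffer) there again.

Here is the full patch source: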
#import <OpenGL/CGLMacro.h>
#import "CaptureWithDevice.h"

#define kQCPlugIn_Name @"Capture With Device"
#define kQCPlugIn_Description @"Serves as a replacement for the default Video Input patch, and differs in that it allows the input device to be specified by the user."

@implementation CaptureWithDevice

@dynamic inputDevice, outputImage;

+ (NSDictionary *)attributes
{
    return [NSDictionary dictionaryWithObjectsAndKeys:
            kQCPlugIn_Name, QCPlugInAttributeNameKey,
            kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
            nil];
}
+ (NSDictionary *)attributesForPropertyPortWithKey:(NSString *)key
{
    if ([key isEqualToString:@"inputDevice"]) {
        NSArray *videoDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];
        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];

        NSMutableArray *deviceNames = [NSMutableArray array];
        int i, ic = [devices count];
        for (i = 0; i < ic; i++) {
            [deviceNames addObject:[[devices objectAtIndex:i] description]];
            // be sure not to add CT to the list
        }
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Device", QCPortAttributeNameKey,
                QCPortTypeIndex, QCPortAttributeTypeKey,
                [NSNumber numberWithInt:0], QCPortAttributeMinimumValueKey,
                deviceNames, QCPortAttributeMenuItemsKey,
                [NSNumber numberWithInt:ic - 1], QCPortAttributeMaximumValueKey,
                nil];
    }
    if ([key isEqualToString:@"outputImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Video Image", QCPortAttributeNameKey,
                nil];
    return nil;
}
+ (QCPlugInExecutionMode)executionMode
{
    return kQCPlugInExecutionModeProvider;
}

+ (QCPlugInTimeMode)timeMode
{
    return kQCPlugInTimeModeIdle;
}
- (id)init
{
    if (self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_devicesDidChange:)
                                                     name:QTCaptureDeviceWasConnectedNotification
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self
                                                 selector:@selector(_devicesDidChange:)
                                                     name:QTCaptureDeviceWasDisconnectedNotification
                                                   object:nil];
    }
    return self;
}

- (void)finalize
{
    [super finalize];
}

- (void)dealloc
{
    if (mCaptureSession) {
        [mCaptureSession release];
        [mCaptureDeviceInput release];
        [mCaptureDecompressedVideoOutput release];
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [super dealloc];
}

@end
@implementation CaptureWithDevice (Execution)

- (BOOL)startExecution:(id<QCPlugInContext>)context
{
    return YES;
}

- (void)enableExecution:(id<QCPlugInContext>)context
{
}

// Called by Quartz Composer when it is done with the buffer handed to
// outputImageProviderFromBufferWithPixelFormat:...; balances the lock and
// retain taken in execute:.
static void _BufferReleaseCallback(const void *address, void *info)
{
    CVPixelBufferUnlockBaseAddress(info, 0);
    CVBufferRelease(info);
}
- (BOOL)execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary *)arguments
{
    if (!mCaptureSession || [mCaptureSession isRunning] == NO || _currentDevice != self.inputDevice) {
        NSError *error = nil;
        BOOL success;

        NSArray *videoDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices = [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];
        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];
        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];

        NSUInteger d = self.inputDevice;
        if (!(d < [devices count])) {
            d = 0;
        }
        QTCaptureDevice *device = [devices objectAtIndex:d];
        success = [device open:&error];
        if (!success) {
            NSLog(@"Could not open device %@", device);
            self.outputImage = nil;
            return YES;
        }
        NSLog(@"Opened device successfully");

        [mCaptureSession release];
        mCaptureSession = [[QTCaptureSession alloc] init];

        [mCaptureDeviceInput release];
        mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device];

        // if the device is a muxed connection make sure to get the right connection
        if ([muxedDevices containsObject:device]) {
            NSLog(@"Disabling audio connections");
            NSArray *ownedConnections = [mCaptureDeviceInput connections];
            for (QTCaptureConnection *connection in ownedConnections) {
                NSLog(@"MediaType: %@", [connection mediaType]);
                if ([[connection mediaType] isEqualToString:QTMediaTypeSound]) {
                    [connection setEnabled:NO];
                    NSLog(@"disabling audio connection");
                }
            }
        }

        success = [mCaptureSession addInput:mCaptureDeviceInput error:&error];
        if (!success) {
            NSLog(@"Failed to add Input");
            self.outputImage = nil;
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession = nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput = nil;
            }
            return YES;
        }
        NSLog(@"Adding output");

        [mCaptureDecompressedVideoOutput release];
        mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init];
        [mCaptureDecompressedVideoOutput setPixelBufferAttributes:
         [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
          [NSNumber numberWithLong:k32ARGBPixelFormat], kCVPixelBufferPixelFormatTypeKey,
          nil]];
        [mCaptureDecompressedVideoOutput setDelegate:self];
        success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error];
        if (!success) {
            NSLog(@"Failed to add output");
            self.outputImage = nil;
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession = nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput = nil;
            }
            if (mCaptureDecompressedVideoOutput) {
                [mCaptureDecompressedVideoOutput release];
                mCaptureDecompressedVideoOutput = nil;
            }
            return YES;
        }

        [mCaptureSession startRunning];
        _currentDevice = self.inputDevice;
    }

    // Grab the most recent frame under the same lock the delegate uses to swap it in.
    CVImageBufferRef imageBuffer;
    @synchronized (self) {
        imageBuffer = CVBufferRetain(mCurrentImageBuffer);
    }
    if (imageBuffer) {
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        NSLog(@"ColorSpace: %@", CVImageBufferGetColorSpace(imageBuffer));
        // NSLog(@"ColorSpace: %@ Width: %zu Height: %zu", CVImageBufferGetColorSpace(imageBuffer), CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer));
        id provider = [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8
                                                                 pixelsWide:CVPixelBufferGetWidth(imageBuffer)
                                                                 pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
                                                                baseAddress:CVPixelBufferGetBaseAddress(imageBuffer)
                                                                bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer)
                                                            releaseCallback:_BufferReleaseCallback
                                                             releaseContext:imageBuffer
                                                                 colorSpace:CVImageBufferGetColorSpace(imageBuffer)
                                                           shouldColorMatch:YES];
        if (provider == nil) {
            return NO;
        }
        self.outputImage = provider;
    }
    else
        self.outputImage = nil;
    return YES;
}
- (void)disableExecution:(id<QCPlugInContext>)context
{
}

- (void)stopExecution:(id<QCPlugInContext>)context
{
}

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer
       fromConnection:(QTCaptureConnection *)connection
{
    NSLog(@"connection type: %@", [connection mediaType]);
    CVImageBufferRef imageBufferToRelease;
    CVBufferRetain(videoFrame);
    // Swap the new frame in and read the old one out under the same lock,
    // so execute: never sees a half-updated buffer.
    @synchronized (self) {
        imageBufferToRelease = mCurrentImageBuffer;
        mCurrentImageBuffer = videoFrame;
    }
    CVBufferRelease(imageBufferToRelease);
}

- (void)_devicesDidChange:(NSNotification *)aNotification
{
}

@end
I managed to get this patch working with both video and muxed inputs by removing kCVPixelBufferOpenGLCompatibilityKey from mCaptureDecompressedVideoOutput's pixel buffer attributes. While that lets the patch work perfectly inside Quartz Composer, my intention is to run it in a composition hosted by CamTwist, which does not appear to need OpenGL support. There it now just shows a black screen for both video and muxed inputs, whereas the earlier version at least worked with video inputs. So my next step is to convert the CVImageBufferRef to an OpenGL texture and see if I can output that through outputImageProviderFromTextureWithPixelFormat:pixelsWide:pixelsHigh:name:flipped:releaseCallback:releaseContext:colorSpace:shouldColorMatch:
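Something along these lines is what I have in mind (an untested sketch: the GL_BGRA/GL_UNSIGNED_INT_8_8_8_8 upload combination for k32ARGBPixelFormat, flipped:YES, and borrowing [context colorSpace] are all assumptions on my part; it reuses the retained, locked imageBuffer from execute:, and the CGLMacro.h import routes the gl* calls through the local cgl_ctx):

// Texture counterpart of _BufferReleaseCallback: delete the texture, then
// unlock and release the pixel buffer that backed it.
static void _TextureReleaseCallback(CGLContextObj cgl_ctx, GLuint name, void *info)
{
    glDeleteTextures(1, &name);
    CVPixelBufferUnlockBaseAddress(info, 0);
    CVBufferRelease(info);
}

// In execute:, with imageBuffer retained and its base address locked:
CGLContextObj cgl_ctx = [context CGLContextObj];
GLsizei width = (GLsizei)CVPixelBufferGetWidth(imageBuffer);
GLsizei height = (GLsizei)CVPixelBufferGetHeight(imageBuffer);
GLuint name = 0;
glGenTextures(1, &name);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, name);
// Row length in pixels, in case bytesPerRow includes padding.
glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)(CVPixelBufferGetBytesPerRow(imageBuffer) / 4));
// GL_BGRA + GL_UNSIGNED_INT_8_8_8_8 should match k32ARGBPixelFormat byte
// order on little-endian Macs (my assumption).
glTexImage2D(GL_TEXTURE_RECTANGLE_EXT, 0, GL_RGBA8, width, height, 0,
             GL_BGRA, GL_UNSIGNED_INT_8_8_8_8, CVPixelBufferGetBaseAddress(imageBuffer));
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
glBindTexture(GL_TEXTURE_RECTANGLE_EXT, 0);

id provider = [context outputImageProviderFromTextureWithPixelFormat:QCPlugInPixelFormatARGB8
                                                           pixelsWide:width
                                                           pixelsHigh:height
                                                                 name:name
                                                              flipped:YES // CV rows are top-down, GL rows bottom-up
                                                      releaseCallback:_TextureReleaseCallback
                                                       releaseContext:imageBuffer
                                                           colorSpace:[context colorSpace]
                                                     shouldColorMatch:YES];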