Creating a selectable video input patch for Quartz: muxed input fails

Posted on 2024-12-13 17:17:04

I'm trying to create a custom patch for Quartz Composer that functions just like the Video Input patch, but with a selectable capture device on an input port. It's a small patch and looks right to me, but when I connect a DV device (a Canopus ADVC-110) and select it, the ColorSpace is (null) and I get an exception. It works fine with the FaceTime HD camera, which is a video media type. I must be missing something, but I just can't see it.

The captureOutput delegate method fires over and over, as if new frames are coming in, and the capture seems to start fine. What am I missing?

#import <OpenGL/CGLMacro.h>
#import "CaptureWithDevice.h"

#define kQCPlugIn_Name              @"Capture With Device"
#define kQCPlugIn_Description       @"Serves as a replacement for the default Video Input patch, and differs in that it allows the input device to be specified by the user."

@implementation CaptureWithDevice
@dynamic inputDevice, outputImage;

+ (NSDictionary*) attributes
{
    return [NSDictionary dictionaryWithObjectsAndKeys:
            kQCPlugIn_Name, QCPlugInAttributeNameKey, 
            kQCPlugIn_Description, QCPlugInAttributeDescriptionKey,
            nil];
}
+ (NSDictionary*) attributesForPropertyPortWithKey:(NSString*)key
{       
    if([key isEqualToString:@"inputDevice"]) {
        NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];

        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init ];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];

        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];

        NSMutableArray *deviceNames= [NSMutableArray array];

        int i, ic = (int)[devices count]; // -count returns NSUInteger


        for(i= 0; i<ic; i++) {
            [deviceNames addObject:[[devices objectAtIndex:i] description]];
            // be sure not to add CT to the list
        }



        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Device", QCPortAttributeNameKey,
                QCPortTypeIndex,QCPortAttributeTypeKey,
                [NSNumber numberWithInt:0], QCPortAttributeMinimumValueKey,
                deviceNames, QCPortAttributeMenuItemsKey,
                [NSNumber numberWithInt:ic-1], QCPortAttributeMaximumValueKey,
                nil];
    }
    if([key isEqualToString:@"outputImage"])
        return [NSDictionary dictionaryWithObjectsAndKeys:
                @"Video Image", QCPortAttributeNameKey,
                nil];
    return nil;
}
+ (QCPlugInExecutionMode) executionMode
{
    return kQCPlugInExecutionModeProvider;
}

+ (QCPlugInTimeMode) timeMode
{
    return kQCPlugInTimeModeIdle;
}

- (id) init
{
    if(self = [super init]) {
        [[NSNotificationCenter defaultCenter] addObserver:self 
                                                 selector:@selector(_devicesDidChange:) 
                                                     name:QTCaptureDeviceWasConnectedNotification 
                                                   object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self 
                                                 selector:@selector(_devicesDidChange:) 
                                                     name:QTCaptureDeviceWasDisconnectedNotification 
                                                   object:nil];
    }
    return self;
}

- (void) finalize
{
    [super finalize];
}

- (void) dealloc
{
    if (mCaptureSession) {
        [mCaptureSession release];
        [mCaptureDeviceInput release];
        [mCaptureDecompressedVideoOutput release];
    }
    [[NSNotificationCenter defaultCenter] removeObserver:self];
    [super dealloc];
}

@end

@implementation CaptureWithDevice (Execution)

- (BOOL) startExecution:(id<QCPlugInContext>)context
{
    return YES;
}

- (void) enableExecution:(id<QCPlugInContext>)context
{
}
static void _BufferReleaseCallback(const void* address, void* info)
{
    // info is the CVPixelBufferRef that -execute:atTime:withArguments: retained
    // and locked before handing its base address to the image provider.
    CVPixelBufferUnlockBaseAddress((CVPixelBufferRef)info, 0);
    CVBufferRelease((CVPixelBufferRef)info);
}
- (BOOL) execute:(id<QCPlugInContext>)context atTime:(NSTimeInterval)time withArguments:(NSDictionary*)arguments
{
    if (!mCaptureSession || [mCaptureSession isRunning]==NO || _currentDevice!=self.inputDevice){
        NSError *error = nil;
        BOOL success;

        NSArray *videoDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeVideo];
        NSArray *muxedDevices= [QTCaptureDevice inputDevicesWithMediaType:QTMediaTypeMuxed];

        NSMutableArray *mutableArrayOfDevice = [[NSMutableArray alloc] init ];
        [mutableArrayOfDevice addObjectsFromArray:videoDevices];
        [mutableArrayOfDevice addObjectsFromArray:muxedDevices];

        NSArray *devices = [NSArray arrayWithArray:mutableArrayOfDevice];
        [mutableArrayOfDevice release];


        NSUInteger d= self.inputDevice;
        if (!(d<[devices count])) {
            d= 0;
        }
        QTCaptureDevice *device = [devices objectAtIndex:d];
        success = [device open:&error];
        if (!success) {
            NSLog(@"Could not open device %@", device);
            self.outputImage = nil; 
            return YES;
        } 
        NSLog(@"Opened device successfully");




        [mCaptureSession release];
        mCaptureSession = [[QTCaptureSession alloc] init];

        [mCaptureDeviceInput release];
        mCaptureDeviceInput = [[QTCaptureDeviceInput alloc] initWithDevice:device];

        // if the device is a muxed connection  make sure to get the right connection
        if ([muxedDevices containsObject:device]) {
            NSLog(@"Disabling audio connections");
            NSArray *ownedConnections = [mCaptureDeviceInput connections];
            for (QTCaptureConnection *connection in ownedConnections) {
                NSLog(@"MediaType: %@", [connection mediaType]);
                if ( [[connection mediaType] isEqualToString:QTMediaTypeSound]) {
                    [connection setEnabled:NO];
                    NSLog(@"disabling audio connection");

                }
            }
        }



        success = [mCaptureSession addInput:mCaptureDeviceInput error:&error];

        if (!success) {
            NSLog(@"Failed to add Input");
            self.outputImage = nil; 
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession= nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput= nil;

            }
            return YES;
        }




        NSLog(@"Adding output");

        [mCaptureDecompressedVideoOutput release];
        mCaptureDecompressedVideoOutput = [[QTCaptureDecompressedVideoOutput alloc] init];

        [mCaptureDecompressedVideoOutput setPixelBufferAttributes:
         [NSDictionary dictionaryWithObjectsAndKeys:
          [NSNumber numberWithBool:YES], kCVPixelBufferOpenGLCompatibilityKey,
          [NSNumber numberWithLong:k32ARGBPixelFormat], kCVPixelBufferPixelFormatTypeKey, nil]];

        [mCaptureDecompressedVideoOutput setDelegate:self];
        success = [mCaptureSession addOutput:mCaptureDecompressedVideoOutput error:&error];

        if (!success) {
            NSLog(@"Failed to add output");
            self.outputImage = nil; 
            if (mCaptureSession) {
                [mCaptureSession release];
                mCaptureSession= nil;
            }
            if (mCaptureDeviceInput) {
                [mCaptureDeviceInput release];
                mCaptureDeviceInput= nil;
            }
            if (mCaptureDecompressedVideoOutput) {
                [mCaptureDecompressedVideoOutput release];
                mCaptureDecompressedVideoOutput= nil;
            }
            return YES;
        }

        [mCaptureSession startRunning]; 
        _currentDevice= self.inputDevice;
    }


    CVImageBufferRef imageBuffer;
    @synchronized (self) {
        // Take our reference under the same lock the capture delegate uses.
        imageBuffer = CVBufferRetain(mCurrentImageBuffer);
    }

    if (imageBuffer) {
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        NSLog(@"ColorSpace: %@", CVImageBufferGetColorSpace(imageBuffer));
        //NSLog(@"ColorSpace: %@ Height: %@ Width: %@", CVImageBufferGetColorSpace(imageBuffer), CVPixelBufferGetWidth(imageBuffer), CVPixelBufferGetHeight(imageBuffer));
        id provider= [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8           
                                                                pixelsWide:CVPixelBufferGetWidth(imageBuffer)
                                                                pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
                                                               baseAddress:CVPixelBufferGetBaseAddress(imageBuffer)
                                                               bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer)
                                                           releaseCallback:_BufferReleaseCallback
                                                            releaseContext:imageBuffer
                                                                colorSpace:CVImageBufferGetColorSpace(imageBuffer)
                                                          shouldColorMatch:YES];
        if(provider == nil) {
            return NO; 
        }
        self.outputImage = provider;
    } 
    else 
        self.outputImage = nil; 

    return YES; 
}

- (void) disableExecution:(id<QCPlugInContext>)context
{
}
- (void) stopExecution:(id<QCPlugInContext>)context
{
}

- (void)captureOutput:(QTCaptureOutput *)captureOutput
  didOutputVideoFrame:(CVImageBufferRef)videoFrame 
     withSampleBuffer:(QTSampleBuffer *)sampleBuffer 
       fromConnection:(QTCaptureConnection *)connection
{    
    NSLog(@"connection type: %@", [connection mediaType]);
    CVImageBufferRef imageBufferToRelease;
    CVBufferRetain(videoFrame);

    @synchronized (self) {
        // Swap under the lock; reading mCurrentImageBuffer outside
        // @synchronized would race with -execute:atTime:withArguments:.
        imageBufferToRelease = mCurrentImageBuffer;
        mCurrentImageBuffer = videoFrame;
    }
    CVBufferRelease(imageBufferToRelease);
}
- (void)_devicesDidChange:(NSNotification *)aNotification
{
}
@end
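
Since CVImageBufferGetColorSpace() returns NULL for the muxed DV device right before the exception, one candidate culprit is the colorSpace: argument of the provider call. Below is a minimal, untested guard, assuming that is where the exception originates; CGColorSpaceCreateDeviceRGB() is an illustrative stand-in, not necessarily the correct space for DV footage:

// Hypothetical guard: substitute a device-RGB color space when the frame
// reports none, so the provider call never receives a NULL colorSpace.
CGColorSpaceRef colorSpace = CVImageBufferGetColorSpace(imageBuffer);
BOOL createdColorSpace = NO;
if (colorSpace == NULL) {
    colorSpace = CGColorSpaceCreateDeviceRGB();
    createdColorSpace = YES;
}
id provider = [context outputImageProviderFromBufferWithPixelFormat:QCPlugInPixelFormatARGB8
                                                         pixelsWide:CVPixelBufferGetWidth(imageBuffer)
                                                         pixelsHigh:CVPixelBufferGetHeight(imageBuffer)
                                                        baseAddress:CVPixelBufferGetBaseAddress(imageBuffer)
                                                        bytesPerRow:CVPixelBufferGetBytesPerRow(imageBuffer)
                                                    releaseCallback:_BufferReleaseCallback
                                                     releaseContext:imageBuffer
                                                         colorSpace:colorSpace
                                                   shouldColorMatch:YES];
if (createdColorSpace)
    CGColorSpaceRelease(colorSpace); // assuming the provider retains what it needs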

Comments (1)

捂风挽笑 2024-12-20 17:17:04


I managed to get this patch to work with both Video and Muxed inputs by removing the kCVPixelBufferOpenGLCompatibilityKey from mCaptureDecompressedVideoOutput's pixel buffer attributes. While that allows the patch to work perfectly inside Quartz Composer, my intent is to run this patch in a composition used inside CamTwist, which appears not to need OpenGL support. Right now it just displays a black screen with either Video or Muxed inputs, where it was working with Video inputs before. So I'm going to convert my CVImageBufferRef to an OpenGL texture and see if I can get that to work with

outputImageProviderFromTextureWithPixelFormat:pixelsWide:pixelsHigh:name:flipped:releaseCallback:releaseContext:colorSpace:shouldColorMatch:
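
For anyone attempting the same route, here is a minimal, untested sketch of that texture path. It assumes a CGLContextObj obtained from the plug-in context, a frame delivered in k32ARGBPixelFormat, and a little-endian (Intel) Mac; the texture setup, error handling, and cleanup are illustrative only:

// Hypothetical texture upload inside -execute:atTime:withArguments:.
CGLContextObj cgl_ctx = [context CGLContextObj]; // CGLMacro.h routes GL calls through this

CVPixelBufferLockBaseAddress(imageBuffer, 0);
GLsizei width  = (GLsizei)CVPixelBufferGetWidth(imageBuffer);
GLsizei height = (GLsizei)CVPixelBufferGetHeight(imageBuffer);

GLuint name;
glGenTextures(1, &name);
glBindTexture(GL_TEXTURE_RECTANGLE_ARB, name);
glPixelStorei(GL_UNPACK_ROW_LENGTH, (GLint)(CVPixelBufferGetBytesPerRow(imageBuffer) / 4));
// GL_BGRA + GL_UNSIGNED_INT_8_8_8_8 reads the bytes as A,R,G,B on little-endian
// machines, matching k32ARGBPixelFormat.
glTexImage2D(GL_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8, width, height, 0,
             GL_BGRA, GL_UNSIGNED_INT_8_8_8_8,
             CVPixelBufferGetBaseAddress(imageBuffer));
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
CVBufferRelease(imageBuffer); // the texture now owns a copy of the pixels

// Reuse the NULL-color-space guard from the sketch above for colorSpace:.
self.outputImage = [context outputImageProviderFromTextureWithPixelFormat:QCPlugInPixelFormatARGB8
                                                                pixelsWide:width
                                                                pixelsHigh:height
                                                                      name:name
                                                                   flipped:NO
                                                           releaseCallback:_TextureReleaseCallback
                                                            releaseContext:NULL
                                                                colorSpace:colorSpace
                                                          shouldColorMatch:YES];

The matching release callback would delete the texture once Quartz Composer is done with it (the QCPlugInTextureReleaseCallback signature shown here is an assumption to verify against QCPlugIn.h):

static void _TextureReleaseCallback(CGLContextObj cgl_ctx, GLuint name, void* info)
{
    glDeleteTextures(1, &name);
}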