AVAssetWriter not working with audio

I'm trying to get audio to record along with the video in an iOS application. The video is fine, but no audio is ever written to the file (my iPhone's speaker works).

Here's the init setup:

session = [[AVCaptureSession alloc] init];
menu->session = session;
menu_open = NO;
session.sessionPreset = AVCaptureSessionPresetMedium;
camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
microphone = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
menu->camera = camera;

[session beginConfiguration];
[camera lockForConfiguration:nil];
if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
    camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}
if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
    camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}
if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
    camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
}
if ([camera hasTorch]) {
    if ([camera isTorchModeSupported:AVCaptureTorchModeOn]) {
        [camera setTorchMode:AVCaptureTorchModeOn];
    }
}
[camera unlockForConfiguration];
[session commitConfiguration];

AVCaptureDeviceInput *camera_input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:nil];
[session addInput:camera_input];
microphone_input = [[AVCaptureDeviceInput deviceInputWithDevice:microphone error:nil] retain];

// Video frames arrive as 32BGRA pixel buffers on this class's delegate queue.
AVCaptureVideoDataOutput *output = [[[AVCaptureVideoDataOutput alloc] init] autorelease];
output.videoSettings = [NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                                   forKey:(id)kCVPixelBufferPixelFormatTypeKey];
[session addOutput:output];
output.minFrameDuration = CMTimeMake(1, 30);
dispatch_queue_t queue = dispatch_queue_create("MY QUEUE", NULL);
[output setSampleBufferDelegate:self queue:queue];
dispatch_release(queue);

// Audio samples go to a separate delegate object on its own serial queue.
audio_output = [[AVCaptureAudioDataOutput alloc] init]; // alloc/init already leaves the ivar retained
queue = dispatch_queue_create("MY QUEUE", NULL);
AudioOutputBufferDelegate *special_delegate = [[AudioOutputBufferDelegate alloc] init]; // kept alive for the session's lifetime
special_delegate->normal_delegate = self;
[audio_output setSampleBufferDelegate:special_delegate queue:queue];
dispatch_release(queue);
[session startRunning];

Here is the beginning and end of recording:

if (recording) { // Currently recording, so stop
    [video_button setTitle:@"Video" forState: UIControlStateNormal];
    recording = NO;
    [writer_input markAsFinished];
    [audio_writer_input markAsFinished];
    [video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
    [video_writer finishWriting];
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url,self,@selector(video:didFinishSavingWithError:contextInfo:),nil);
    [start_time release];
    [temp_url release];
    [av_adaptor release];
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session removeInput:microphone_input];
    [session removeOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [vid_off play];
} else { // Start recording
    [vid_on play];
    [microphone lockForConfiguration:nil];
    [session beginConfiguration];
    [session addInput:microphone_input];
    [session addOutput:audio_output];
    [session commitConfiguration];
    [microphone unlockForConfiguration];
    [menu restateConfigiration];
    [video_button setTitle:@"Stop" forState: UIControlStateNormal];
    recording = YES;
    NSError *error = nil;
    NSFileManager * file_manager = [[NSFileManager alloc] init];
    temp_url = [[NSString alloc] initWithFormat:@"%@/%@", NSTemporaryDirectory(), @"temp.mp4"];
    [file_manager removeItemAtPath: temp_url error:NULL];
    [file_manager release];
    video_writer = [[AVAssetWriter alloc] initWithURL: [NSURL fileURLWithPath:temp_url] fileType: AVFileTypeMPEG4 error: &error];
    NSDictionary *video_settings = [NSDictionary dictionaryWithObjectsAndKeys: AVVideoCodecH264, AVVideoCodecKey,[NSNumber numberWithInt:360], AVVideoWidthKey,[NSNumber numberWithInt:480], AVVideoHeightKey,nil];
    writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:video_settings] retain];
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    audio_writer_input = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings: [NSDictionary dictionaryWithObjectsAndKeys: [NSNumber numberWithInt: kAudioFormatMPEG4AAC], AVFormatIDKey,[NSNumber numberWithInt: 1], AVNumberOfChannelsKey,[NSNumber numberWithFloat: 44100.0], AVSampleRateKey,[NSNumber numberWithInt: 64000], AVEncoderBitRateKey,[NSData dataWithBytes: &acl length: sizeof(acl) ], AVChannelLayoutKey,nil]] retain];
    audio_writer_input.expectsMediaDataInRealTime = YES;
    av_adaptor = [[AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput: writer_input sourcePixelBufferAttributes:NULL] retain];
    [video_writer addInput:writer_input];
    [video_writer addInput: audio_writer_input];
    [video_writer startWriting];
    [video_writer startSessionAtSourceTime: CMTimeMake(0,1)];
    start_time = [[NSDate alloc] init];
}

Here is the delegate for the audio:

@implementation AudioOutputBufferDelegate
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    if (normal_delegate->recording) {
        CMSampleBufferSetOutputPresentationTimeStamp(sampleBuffer, CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate:normal_delegate->start_time], 30));
        [normal_delegate->audio_writer_input appendSampleBuffer:sampleBuffer];
    }
}
@end

The video delegate method doesn't matter because it works. "restateConfigiration" just reapplies the session configuration, since otherwise the torch goes off, etc.:

[session beginConfiguration];
switch (quality) {
    case Low:
        session.sessionPreset = AVCaptureSessionPresetLow;
        break;
    case Medium:
        session.sessionPreset = AVCaptureSessionPreset640x480;
        break;
}
[session commitConfiguration];
[camera lockForConfiguration:nil];
if ([camera isExposureModeSupported:AVCaptureExposureModeContinuousAutoExposure]) {
    camera.exposureMode = AVCaptureExposureModeContinuousAutoExposure;
}
if ([camera isFocusModeSupported:AVCaptureFocusModeContinuousAutoFocus]) {
    camera.focusMode = AVCaptureFocusModeContinuousAutoFocus;
}
if ([camera isWhiteBalanceModeSupported:AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance]) {
    camera.whiteBalanceMode = AVCaptureWhiteBalanceModeContinuousAutoWhiteBalance;
}
if ([camera hasTorch]) {
    if (torch) {
        if ([camera isTorchModeSupported:AVCaptureTorchModeOn]) {
            [camera setTorchMode:AVCaptureTorchModeOn];
        }
    } else {
        if ([camera isTorchModeSupported:AVCaptureTorchModeOff]) {
            [camera setTorchMode:AVCaptureTorchModeOff];
        }
    }
}
[camera unlockForConfiguration];

Thank you for any help.


Answer from 茶底世界, 2024-10-27:

AVAssetWriter and Audio

This may be the same issue as the one mentioned in the linked post. Try commenting out these lines:

[writer_input markAsFinished];
[audio_writer_input markAsFinished];
[video_writer endSessionAtSourceTime: CMTimeMakeWithSeconds([[NSDate date] timeIntervalSinceDate: start_time],30)];
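
If that fixes it, the stop path collapses to a single call. A minimal sketch against the variable names in the question (on these early SDKs, finishWriting is synchronous and returns a BOOL; the rest of your cleanup stays as it was):

// Sketch: let finishWriting close the inputs itself instead of
// marking them finished and ending the session by hand.
recording = NO;
if ([video_writer finishWriting]) {
    UISaveVideoAtPathToSavedPhotosAlbum(temp_url, self,
        @selector(video:didFinishSavingWithError:contextInfo:), nil);
} else {
    NSLog(@"finishWriting failed: %@", video_writer.error);
}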

Edit

I don't know if the way you are setting the presentation timestamp is necessarily wrong. The way I handle it is with a variable that is set to 0 at start. Then, when my delegate receives the first packet, I do:

if (_startTime.value == 0) {
    _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
}

and then:

[bufferWriter->writer startWriting];
[bufferWriter->writer startSessionAtSourceTime:_startTime];

Your code looks valid, since you are calculating the time difference for each received packet. However, AVFoundation calculates this for you, and also optimizes the timestamps for placement in the interleaved container. Another thing I am unsure of: each audio CMSampleBufferRef can contain more than one data buffer, where each data buffer has its own PTS. I am not sure whether setting the PTS automatically adjusts all the other data buffers.
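
Given that, a simpler variant of your audio delegate may be worth trying: start the writer session at the first buffer's own PTS (as above) and append each buffer untouched, rather than re-stamping it from wall-clock time. A hypothetical reworking against your variable names, assuming the session was started that way:

// Sketch: trust the PTS AVFoundation already assigned to the buffer
// instead of overwriting it with NSDate math at timescale 30.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection {
    if (!normal_delegate->recording) {
        return;
    }
    if (normal_delegate->audio_writer_input.readyForMoreMediaData) {
        // The buffer already carries a PTS on the session's clock,
        // so audio and video stay aligned without manual bookkeeping.
        [normal_delegate->audio_writer_input appendSampleBuffer:sampleBuffer];
    }
}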

Where my code differs from yours is that I use a single dispatch queue for both audio and video. In the callback I use (some code removed):

switch (bufferWriter->writer.status) {
    case AVAssetWriterStatusUnknown:
        if (_startTime.value == 0) {
            _startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        }

        [bufferWriter->writer startWriting];
        [bufferWriter->writer startSessionAtSourceTime:_startTime];

        // Break if not ready, otherwise fall through.
        if (bufferWriter->writer.status != AVAssetWriterStatusWriting) {
            break;
        }

    case AVAssetWriterStatusWriting:
        if (captureOutput == self.captureManager.audioOutput) {
            if (!bufferWriter->audioIn.readyForMoreMediaData) {
                break;
            }

            @try {
                if (![bufferWriter->audioIn appendSampleBuffer:sampleBuffer]) {
                    [self delegateMessage:@"Audio Writing Error" withType:ERROR];
                }
            }
            @catch (NSException *e) {
                NSLog(@"Audio Exception: %@", [e reason]);
            }
        }
        else if (captureOutput == self.captureManager.videoOutput) {
            if (!bufferWriter->videoIn.readyForMoreMediaData) {
                break;
            }

            @try {
                if (!frontCamera) {
                    if (![bufferWriter->videoIn appendSampleBuffer:sampleBuffer]) {
                        [self delegateMessage:@"Video Writing Error" withType:ERROR];
                    }
                }
                else {
                    // Front-camera frames are mirrored first, then appended
                    // through the pixel buffer adaptor with their original PTS.
                    CMTime pt = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);

                    flipBuffer(sampleBuffer, pixelBuffer);

                    if (![bufferWriter->adaptor appendPixelBuffer:pixelBuffer withPresentationTime:pt]) {
                        [self delegateMessage:@"Video Writing Error" withType:ERROR];
                    }
                }
            }
            @catch (NSException *e) {
                NSLog(@"Video Exception: %@", [e reason]);
            }
        }
        break;

    case AVAssetWriterStatusCompleted:
        return;

    case AVAssetWriterStatusFailed:
        [self delegateMessage:@"Critical Error Writing Queues" withType:ERROR];
        bufferWriter->writer_failed = YES;
        _broadcastError = YES;
        [self stopCapture];
        return;

    case AVAssetWriterStatusCancelled:
        break;

    default:
        break;
}
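
For reference, the single-queue setup the callback above assumes might look like this sketch. The names captureManager, audioOutput, and videoOutput match the ones used in the switch; the queue label is arbitrary. One serial queue guarantees the writer sees audio and video buffers in the order they were captured:

// Sketch: one serial queue feeding both data outputs to the same delegate.
dispatch_queue_t av_queue = dispatch_queue_create("com.example.avwriter", NULL);

AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
[videoOutput setSampleBufferDelegate:self queue:av_queue];

AVCaptureAudioDataOutput *audioOutput = [[AVCaptureAudioDataOutput alloc] init];
[audioOutput setSampleBufferDelegate:self queue:av_queue];

[session addOutput:videoOutput];
[session addOutput:audioOutput];
dispatch_release(av_queue); // the outputs keep the queue alive while they use it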