Capturing the screen with AVAssetWriter - works fine in the Simulator but produces a black video on the device


I am trying to capture the frame-buffer data and turn it into a video for my iPhone game. I am using AVAssetWriter to accomplish this. The code works fine in the Simulator, but not on the device itself: on the device it produces a black video.

I am using the following code:

// This code initializes the AVAssetWriter and other things

- (void) testVideoWriter {
    CGRect screenBoundst = [[UIScreen mainScreen] bounds];
    //initialize global info
    MOVIE_NAME = @"Documents/Movie5.mp4";
    //CGSize size = CGSizeMake(screenBoundst.size.width, screenBoundst.size.height);
    CGSize size = CGSizeMake(320, 480);
    frameLength = CMTimeMake(1, 5); 
    currentTime = kCMTimeZero;
    currentFrame = 0;

    MOVIE_PATH = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];
    NSError *error = nil;

    videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:MOVIE_PATH]
                                            fileType:AVFileTypeMPEG4 error:&error];

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:size.width ], AVVideoWidthKey,
                                   [NSNumber numberWithInt:size.height ], AVVideoHeightKey, nil];

    writerInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSettings];

    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32BGRA],
                                                           kCVPixelBufferPixelFormatTypeKey, nil];

    adaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:
               writerInput sourcePixelBufferAttributes:sourcePixelBufferAttributesDictionary];
    [adaptor retain];

    CGAffineTransform flipVertical = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    flipVertical = CGAffineTransformRotate(flipVertical,(90.0*3.14f/180.0f));
    [writerInput setTransform:flipVertical];

    [videoWriter addInput:writerInput];

    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    VIDEO_WRITER_IS_READY = true;
}
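
In case it matters, this is roughly how I check that the writer actually started on the device. logCurrentWriterState is just my own helper name, not an AVFoundation method, so treat it as a sketch:

// Sketch: a small helper to confirm the writer really reached the writing state.
- (void) logCurrentWriterState {
    if (videoWriter.status == AVAssetWriterStatusFailed) {
        NSLog(@"writer failed: %@", videoWriter.error);
    } else {
        NSLog(@"writer status: %ld", (long)videoWriter.status);
    }
}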

// this code captures the screen data
- (void) captureScreenVideo {

    if (!writerInput.readyForMoreMediaData) {
        return;
    }

    CGRect screenBounds = [[UIScreen mainScreen] bounds];
    NSLog(@"width : %f Height : %f",screenBounds.size.width,screenBounds.size.height);
    CGSize esize = CGSizeMake(screenBounds.size.width, screenBounds.size.height);
    NSInteger myDataLength = esize.width * esize.height * 4;
    GLuint *buffer = (GLuint *) malloc(myDataLength);
    glReadPixels(0, 0, esize.width, esize.height, GL_RGBA, GL_UNSIGNED_BYTE, buffer);
    CVPixelBufferRef pixel_buffer = NULL;
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, esize.width,
                                          esize.height, kCVPixelFormatType_32BGRA, (CFDictionaryRef) options, 
                                          &pixel_buffer);
    NSParameterAssert(status == kCVReturnSuccess && pixel_buffer != NULL);

    CVPixelBufferLockBaseAddress(pixel_buffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pixel_buffer);
    NSParameterAssert(pixel_buffer != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, esize.width,
                                                 esize.height, 8, 4*esize.width, rgbColorSpace, 
                                                 kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);

    CGAffineTransform flipVerticalq = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    flipVerticalq = CGAffineTransformRotate(flipVerticalq,(90.0*3.14f/180.0f));
    CGContextConcatCTM(context, flipVerticalq);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pixel_buffer, 0);

    if(![adaptor appendPixelBuffer:pixel_buffer withPresentationTime:currentTime]) {
        NSLog(@"FAIL");
    } else {
        NSLog(@"Success:%d", currentFrame);
        currentTime = CMTimeAdd(currentTime, frameLength);
    }

    free(buffer);
    CVPixelBufferRelease(pixel_buffer);
}
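
My understanding is that the bytes returned by glReadPixels have to end up at the pixel buffer's base address somehow, since nothing above writes into pxdata. Something like the row-by-row copy below, placed between the lock and unlock calls, is what I have in mind; note it does not yet deal with the RGBA-vs-BGRA channel order, so it is only a sketch:

    // Sketch only: copy the RGBA bytes from glReadPixels into the locked pixel buffer,
    // one row at a time, because CVPixelBufferGetBytesPerRow() can be larger than width * 4.
    // (Channel order is still RGBA here, while the buffer was created as 32BGRA.)
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pixel_buffer);
    uint8_t *src = (uint8_t *)buffer;
    uint8_t *dst = (uint8_t *)pxdata;
    for (int row = 0; row < (int)esize.height; row++) {
        memcpy(dst + row * bytesPerRow, src + row * (int)esize.width * 4, (size_t)esize.width * 4);
    }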

// this code saves the video to the photo library on the device
- (void) moveVideoToSavedPhotos {

    ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
    NSString *localVid = [NSHomeDirectory() stringByAppendingPathComponent:MOVIE_NAME];    
    NSURL* fileURL = [NSURL fileURLWithPath:localVid];
        NSLog(@"movie saved %@",fileURL);
    BOOL isVideoOK = UIVideoAtPathIsCompatibleWithSavedPhotosAlbum(localVid);
    if (NO == isVideoOK)
        NSLog(@"Video at %@ is not compatible",localVid);
    else {
        NSLog(@"video ok");
    }

    [library writeVideoAtPathToSavedPhotosAlbum:fileURL
                                completionBlock:^(NSURL *assetURL, NSError *error) {
                                    if (error) {   
                                        NSLog(@"%@: Error saving context: %@", [self class], [error localizedDescription]);
                                    }
                                }];
    [library release];
}
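
Before handing the file to ALAssetsLibrary I also log its size to make sure something was actually written; attributesOfItemAtPath:error: and fileSize are standard NSFileManager / NSDictionary calls, but the check itself is just a sketch:

    // Sketch: sanity-check the written file before saving it to the photo library.
    NSDictionary *attrs = [[NSFileManager defaultManager] attributesOfItemAtPath:MOVIE_PATH error:nil];
    NSLog(@"movie file size: %llu bytes", [attrs fileSize]);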

// the following code stops the video recording after a particular number of frames

    if (VIDEO_WRITER_IS_READY) {
        [self captureScreenVideo];
        currentFrame++;

        if (currentFrame > 500) {
            VIDEO_WRITER_IS_READY = false;
            [writerInput markAsFinished];

            if (![videoWriter finishWriting]) {
                NSLog(@"writing not finished");
            }

            CVPixelBufferPoolRelease(adaptor.pixelBufferPool); 
            [writerInput release];
            [videoWriter release]; 


            NSLog(@"saving the file");
            [self moveVideoToSavedPhotos];
        }
    }
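
I have also been wondering whether the synchronous finishWriting call is part of the problem, so this is the block-based variant I have been experimenting with (a sketch, assuming iOS 6 or later where finishWritingWithCompletionHandler: is available):

    // Sketch: finish asynchronously and only save once the writer reports completion.
    [writerInput markAsFinished];
    [videoWriter finishWritingWithCompletionHandler:^{
        NSLog(@"saving the file");
        [self moveVideoToSavedPhotos];
    }];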

Also, the video captured in the Simulator is mirrored. I have no idea where I am going wrong, so please clarify this for me, and I hope you don't mind reading through the whole code.
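
On the mirroring: as far as I know glReadPixels returns rows bottom-up, so either the pixel data or the writer's transform has to flip the image. This is the transform-only variant (no 90-degree rotation) I have been trying, again just as a sketch:

    // Sketch: vertical flip only, without the extra rotation used above.
    CGAffineTransform flipOnly = CGAffineTransformMake(1, 0, 0, -1, 0, self.frame.size.height);
    [writerInput setTransform:flipOnly];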
