Video recording fails when adding a timestamp to the video

I'm making an iOS video recording app. I convert each incoming CMSampleBuffer to a UIImage, draw a timestamp on it, convert it back to a CMSampleBuffer, and append that to an AVAssetWriterInput. When I skip the timestamp step and append the received CMSampleBuffer directly, recording finishes correctly, but with the timestamp added the recording fails. How can I fix this?
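
One general note before the code: once an `AVAssetWriter` hits an error it moves to `.failed` and silently ignores every later append, so the first step is to surface `assetWriter.error` rather than crash or append blindly. A minimal diagnostic sketch (not part of the original code; `appendChecked` is a hypothetical helper around the `assetWriter` property shown below):

    // Sketch (not in the original): report why the writer stopped accepting
    // frames instead of appending blindly.
    func appendChecked(_ buffer: CMSampleBuffer, to input: AVAssetWriterInput) {
        guard assetWriter?.status == .writing else {
            // After the first failure the writer stays in .failed; its `error`
            // property carries the underlying reason.
            print("writer status: \(String(describing: assetWriter?.status)), error: \(String(describing: assetWriter?.error))")
            return
        }
        if !input.append(buffer) {
            print("append failed: \(String(describing: assetWriter?.error))")
        }
    }

The full recording code: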

    private func setupCaptureSession() {
        session.sessionPreset = .vga640x480

        guard
            let videoDevice = AVCaptureDevice.default(for: .video),
            let audioDevice = AVCaptureDevice.default(for: .audio),
            let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
            let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
            fatalError()
        }

        session.beginConfiguration()
        session.addInput(videoInput)
        session.addInput(audioInput)
        session.addOutput(videoOutput)
        session.addOutput(audioOutput)
        session.commitConfiguration()

        DispatchQueue.main.async { [weak self] in
            self?.session.startRunning()
        }
    }

    private func startRecording() {
        self.startUnixtime = DateUtility.getUnixtime()
        
        self.startTimeForDisplayingTimeCounter = Date()
        self.startTimer()
        self.elapsedTimeLabel.text = "00:00:00"
        
        // AVAssetWriter
        assetWriter = try! AVAssetWriter(outputURL: self.exportURL!, fileType: .mov)

        // video
        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
            AVVideoCodecKey : AVVideoCodecType.h264,
            AVVideoWidthKey : 640,
            AVVideoHeightKey : 480
        ])
        videoInput.expectsMediaDataInRealTime = true
        assetWriter?.add(videoInput)

        // audio
        let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
        audioInput.expectsMediaDataInRealTime = true
        assetWriter?.add(audioInput)

        assetWriter?.startWriting()

        // Delegate
        let queue = DispatchQueue.global()
        videoOutput.setSampleBufferDelegate(self, queue: queue)
        audioOutput.setSampleBufferDelegate(self, queue: queue)
    }
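
One alternative worth sketching at this point (a suggestion, not the original approach): instead of rebuilding a CMSampleBuffer for every overlaid frame, create an AVAssetWriterInputPixelBufferAdaptor here, next to `videoInput`, and append the timestamped pixel buffer through it later (see the sketch after `appendVideo`). `pixelBufferAdaptor` is a hypothetical stored property:

    // Sketch (hypothetical property): an adaptor lets the writer accept raw
    // CVPixelBuffers and manages a pool of buffers compatible with its input.
    pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(
        assetWriterInput: videoInput,
        sourcePixelBufferAttributes: [
            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA,
            kCVPixelBufferWidthKey as String: 640,
            kCVPixelBufferHeightKey as String: 480
        ])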

    private func finishRecording() {
        self.endUnixtime = DateUtility.getUnixtime()
        
        self.elapsedTimeLabel.text = "00:00:00"
        self.timer?.invalidate()
        
        videoOutput.setSampleBufferDelegate(nil, queue: nil)
        audioOutput.setSampleBufferDelegate(nil, queue: nil)

        startTime = nil

        assetWriter?.finishWriting { [weak self] in
            guard let self = self else { return }
            guard self.assetWriter!.status == .completed else {
                // Surface the writer's error instead of failing silently.
                fatalError("failed recording: \(String(describing: self.assetWriter?.error))")
            }

            self.saveToPhotoLibrary { isSaveToPhotoLibrarySucceed in
                guard isSaveToPhotoLibrarySucceed else {
                    print("Save to photo library failed")
                    return
                }
                print("video saved to photo library")
                
                self.saveToRealmFromTempVideo {
                    self.uploadVideoToServer()
                }
            }
        }
    }
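
A related detail (from Apple's documented recommendation, not from the original post): marking each input as finished before `finishWriting` tells the writer that no more media is coming:

    // Signal end of media on every input before finishing the writer.
    assetWriter?.inputs.forEach { $0.markAsFinished() }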


    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {

        // A late frame can still be delivered on the delegate queue after
        // recording stops; drop it instead of crashing.
        guard isRecording else { return }

        guard CMSampleBufferDataIsReady(sampleBuffer) else {
            print("not ready")
            return
        }

        if startTime == nil {
            startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            assetWriter?.startSession(atSourceTime: startTime!)
        }

        // Append video or audio
        
        let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
        
        if mediaType == .video {
            appendVideo(from: sampleBuffer)
        } else if mediaType == .audio {
            appendAudio(from: sampleBuffer)
        } else {
            fatalError("should not reach here")
        }
    }

    // Append Video
    func appendVideo(from sampleBuffer: CMSampleBuffer) {
        
        // - Guards
        
        guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
            print("video input not found")
            return
        }
        
        guard videoInput.isReadyForMoreMediaData else {
            print("video input not ready for more media data")
            return
        }
        
        // - Timestamp
        
        let sample: Sample = Sample(sampleBuffer: sampleBuffer)
        
        guard let ciImage = generateCIImage(from: sampleBuffer) else {
            print("CIImage creation from sampleBuffer failed")
            return
        }
        
        let uiImage = UIImage(ciImage: ciImage)
        
        guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
            fatalError("should not reach here")
        }
        
        guard let timestampAddedCvpixelBuffer = timestampAddedImage.toCVPixelBuffer() else {
            print("CVPixelBuffer creation from CIImage failed")
            return
        }
        
        guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
            print("CMSampleBuffer creation from CVPixelBufer failed")
            return
        }
        
        DispatchQueue.main.sync { [weak self] in
            self?.compositeImageView.image = timestampAddedImage
        }
        
        print("append video")
        videoInput.append(timestampAddedSampleBuffer)
    }
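
With the adaptor sketched in `startRecording`, `appendVideo` could hand the overlaid pixel buffer to the writer directly, carrying over the source frame's presentation timestamp instead of rebuilding a CMSampleBuffer. A sketch under the same assumption that `pixelBufferAdaptor` exists:

    // Sketch (not in the original): append the timestamped CVPixelBuffer via
    // the adaptor, reusing the capture frame's presentation time.
    func appendVideoViaAdaptor(_ pixelBuffer: CVPixelBuffer, from sampleBuffer: CMSampleBuffer) {
        let presentationTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
        guard pixelBufferAdaptor.assetWriterInput.isReadyForMoreMediaData else { return }
        if !pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime) {
            // The writer's `error` explains the rejection (format mismatch, timing, ...).
            print("adaptor append failed: \(String(describing: assetWriter?.error))")
        }
    }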

    func addTimestamp(on image: UIImage) -> UIImage? {
        let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
        UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
        image.draw(in: imageRect)
        
        // Text Attributes
        let textColor = UIColor.white
        let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
        
        let textFontAttributes = [
            NSAttributedString.Key.font: textFont,
            NSAttributedString.Key.foregroundColor: textColor,
            NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
        ]
        
        let formatter = DateFormatter()
        formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
        let timestamp: NSString = formatter.string(from: Date()) as NSString
        
        let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
        timestamp.draw(in: textRect, withAttributes: textFontAttributes)
        
        // New Image
        let newImage = UIGraphicsGetImageFromCurrentImageContext()
        UIGraphicsEndImageContext()
        
        return newImage
    }
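
One pitfall in `addTimestamp` (an observation, not confirmed as the root cause here): a scale of `0.0` makes `UIGraphicsBeginImageContextWithOptions` use the device's screen scale, so on a 2x device the rendered bitmap is 1280x960 pixels even though `size` still reports 640x480 points. Pinning the scale keeps the overlay bitmap at the capture resolution:

    // Scale 1.0: one bitmap pixel per point, matching the 640x480 capture.
    UIGraphicsBeginImageContextWithOptions(image.size, true, 1.0)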

    func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
        var sampleBuffer: CMSampleBuffer?
        var timingInfo = timingInfo
        var videoInfo: CMVideoFormatDescription?
        // Both Core Media calls return an OSStatus; checking them avoids
        // silently handing nil back to the caller.
        let formatStatus = CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
        guard formatStatus == noErr, let videoInfo = videoInfo else {
            print("format description creation failed: \(formatStatus)")
            return nil
        }
        let createStatus = CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
                                                              imageBuffer: cvPixelBuffer,
                                                              dataReady: true,
                                                              makeDataReadyCallback: nil,
                                                              refcon: nil,
                                                              formatDescription: videoInfo,
                                                              sampleTiming: &timingInfo,
                                                              sampleBufferOut: &sampleBuffer)
        guard createStatus == noErr else {
            print("sample buffer creation failed: \(createStatus)")
            return nil
        }
        return sampleBuffer
    }
    private extension UIImage {
        func toCVPixelBuffer() -> CVPixelBuffer? {
            let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue,
                         kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
            var pixelBuffer: CVPixelBuffer?
            // `size` is in points; if the image's scale is not 1, the buffer's
            // pixel dimensions will not match the rendered bitmap.
            let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.size.width), Int(self.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
            guard status == kCVReturnSuccess, let pixelBuffer = pixelBuffer else {
                return nil
            }

            CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
            defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0)) }
            let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)

            let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
            guard let context = CGContext(data: pixelData, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue) else {
                return nil
            }

            // Flip the coordinate system so UIKit drawing lands right side up.
            context.translateBy(x: 0, y: self.size.height)
            context.scaleBy(x: 1.0, y: -1.0)

            UIGraphicsPushContext(context)
            self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
            UIGraphicsPopContext()

            return pixelBuffer
        }
    }
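
The UIKit roundtrip itself is a likely weak point: a UIImage created with `UIImage(ciImage:)` has no CGImage backing, and drawing it re-renders through Core Image on every frame. A leaner sketch (a suggestion, assuming a `CIContext` created once and stored as a property rather than built per frame) renders the overlaid CIImage straight into the destination pixel buffer:

    // Sketch (not in the original): render a CIImage into a CVPixelBuffer
    // without going through UIKit. Reuse one CIContext; they are expensive.
    func render(_ image: CIImage, into pixelBuffer: CVPixelBuffer, using ciContext: CIContext) {
        ciContext.render(image, to: pixelBuffer)
    }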

    private final class Sample {
        let timingInfo: CMSampleTimingInfo

        init(sampleBuffer: CMSampleBuffer) {
            let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
            let duration = CMSampleBufferGetDuration(sampleBuffer)
            let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
            timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
        }
    }
