视频录制将时间戳添加到视频时失败
我正在制作iOS视频录制应用程序。当我将cmsamplebuffer转换为uiimage并添加时间戳,然后将其遮盖回CMSampleBuffer,然后将其附加到AvassetWriterInput。当我没有添加时间戳并将接收到的cmsamplebuffer直接添加到avassetWriterInput中时,它可以正确录制,但是当我将时间戳添加到接收到的图像中时,它会失败录制视频。我该如何解决?
private func setupCaptureSession() {
session.sessionPreset = .vga640x480
guard
let videoDevice = AVCaptureDevice.default(for: .video),
let audioDevice = AVCaptureDevice.default(for: .audio),
let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
fatalError()
}
session.beginConfiguration()
session.addInput(videoInput)
session.addInput(audioInput)
session.addOutput(videoOutput)
session.addOutput(audioOutput)
session.commitConfiguration()
DispatchQueue.main.async { [weak self] in
self?.session.startRunning()
}
}
private func startRecording() {
self.startUnixtime = DateUtility.getUnixtime()
self.startTimeForDisplayingTimeCounter = Date()
self.startTimer()
self.elapsedTimeLabel.text = "00:00:00"
// AVAssetWriter
assetWriter = try! AVAssetWriter(outputURL: self.exportURL!, fileType: .mov)
// video
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : 640,
AVVideoHeightKey : 480
])
videoInput.expectsMediaDataInRealTime = true
assetWriter?.add(videoInput)
// audio
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
audioInput.expectsMediaDataInRealTime = true
assetWriter?.add(audioInput)
assetWriter?.startWriting()
// Delegate
let queue = DispatchQueue.global()
videoOutput.setSampleBufferDelegate(self, queue: queue)
audioOutput.setSampleBufferDelegate(self, queue: queue)
}
private func finishRecording() {
self.endUnixtime = DateUtility.getUnixtime()
self.elapsedTimeLabel.text = "00:00:00"
self.timer?.invalidate()
videoOutput.setSampleBufferDelegate(nil, queue: nil)
audioOutput.setSampleBufferDelegate(nil, queue: nil)
startTime = nil
assetWriter?.finishWriting { [weak self] in
guard let self = self else { return }
guard self.assetWriter!.status == .completed else { fatalError("failed recording") }
self.saveToPhotoLibrary { isSaveToPhotoLibrarySucceed in
print("vide saved to photo library")
guard isSaveToPhotoLibrarySucceed else {
print("Save to photo library failed")
return
}
self.saveToRealmFromTempVideo {
self.uploadVideoToServer()
}
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard isRecording else { fatalError() }
guard CMSampleBufferDataIsReady(sampleBuffer) else {
print("not ready")
return
}
if startTime == nil {
startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
assetWriter?.startSession(atSourceTime: startTime!)
}
// Append video or audio
let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
if mediaType == .video {
appendVideo(from: sampleBuffer)
} else if mediaType == .audio {
appendAudio(from: sampleBuffer)
} else {
fatalError("should not reach here")
}
}
// Append Video
func appendVideo(from sampleBuffer: CMSampleBuffer) {
// - Guards
guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
print("video input not found")
return
}
guard videoInput.isReadyForMoreMediaData else {
print("video input not ready for more media data")
return
}
// - Timestamp
let sample: Sample = Sample(sampleBuffer: sampleBuffer)
guard let ciImage = generateCIImage(from: sampleBuffer) else {
print("CIImage creation from sampleBuffer failed")
return
}
let uiImage = UIImage(ciImage: ciImage)
guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
fatalError("should not reach here")
}
guard let timestampAddedCvpixelBuffer = timestampAddedImage.toCVPixelBuffer() else {
print("CVPixelBuffer creation from CIImage failed")
return
}
guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
print("CMSampleBuffer creation from CVPixelBufer failed")
return
}
DispatchQueue.main.sync { [weak self] in
self?.compositeImageView.image = timestampAddedImage
}
print("append video")
videoInput.append(timestampAddedSampleBuffer)
}
func addTimestamp(on image: UIImage) -> UIImage? {
let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
image.draw(in: imageRect)
// Text Attributes
let textColor = UIColor.white
let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
let textFontAttributes = [
NSAttributedString.Key.font: textFont,
NSAttributedString.Key.foregroundColor: textColor,
NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
]
let formatter = DateFormatter()
formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
let timestamp: NSString = formatter.string(from: Date()) as NSString
let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
timestamp.draw(in: textRect, withAttributes: textFontAttributes)
// New Image
let newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext()
return newImage
}
func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
var sampleBuffer: CMSampleBuffer?
var timimgInfo: CMSampleTimingInfo = timingInfo
var videoInfo: CMVideoFormatDescription!
CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: cvPixelBuffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: videoInfo,
sampleTiming: &timimgInfo,
sampleBufferOut: &sampleBuffer)
return sampleBuffer
}
private extension UIImage {
func toCVPixelBuffer() -> CVPixelBuffer? {
let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
var pixelBuffer : CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.size.width), Int(self.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
guard status == kCVReturnSuccess else {
return nil
}
if let pixelBuffer = pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
context?.translateBy(x: 0, y: self.size.height)
context?.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context!)
self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
UIGraphicsPopContext()
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
return nil
}
}
private final class Sample {
let timingInfo: CMSampleTimingInfo
init(sampleBuffer: CMSampleBuffer) {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let duration = CMSampleBufferGetDuration(sampleBuffer)
let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
}
}
I'm making iOS video recording app. When I convert CMSampleBuffer to UIImage and add timestamp and then covert it back to CMSampleBuffer and append to AVAssetWriterInput. When I did not add timestamp and add received CMSampleBuffer directly to AVAssetWriterInput it finish recording correctly but when I add timestamp to received image, it fails recording video. How can I fix this?
private func setupCaptureSession() {
session.sessionPreset = .vga640x480
guard
let videoDevice = AVCaptureDevice.default(for: .video),
let audioDevice = AVCaptureDevice.default(for: .audio),
let videoInput = try? AVCaptureDeviceInput(device: videoDevice),
let audioInput = try? AVCaptureDeviceInput(device: audioDevice) else {
fatalError()
}
session.beginConfiguration()
session.addInput(videoInput)
session.addInput(audioInput)
session.addOutput(videoOutput)
session.addOutput(audioOutput)
session.commitConfiguration()
DispatchQueue.main.async { [weak self] in
self?.session.startRunning()
}
}
private func startRecording() {
self.startUnixtime = DateUtility.getUnixtime()
self.startTimeForDisplayingTimeCounter = Date()
self.startTimer()
self.elapsedTimeLabel.text = "00:00:00"
// AVAssetWriter
assetWriter = try! AVAssetWriter(outputURL: self.exportURL!, fileType: .mov)
// video
let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: [
AVVideoCodecKey : AVVideoCodecType.h264,
AVVideoWidthKey : 640,
AVVideoHeightKey : 480
])
videoInput.expectsMediaDataInRealTime = true
assetWriter?.add(videoInput)
// audio
let audioInput = AVAssetWriterInput(mediaType: .audio, outputSettings: nil)
audioInput.expectsMediaDataInRealTime = true
assetWriter?.add(audioInput)
assetWriter?.startWriting()
// Delegate
let queue = DispatchQueue.global()
videoOutput.setSampleBufferDelegate(self, queue: queue)
audioOutput.setSampleBufferDelegate(self, queue: queue)
}
private func finishRecording() {
self.endUnixtime = DateUtility.getUnixtime()
self.elapsedTimeLabel.text = "00:00:00"
self.timer?.invalidate()
videoOutput.setSampleBufferDelegate(nil, queue: nil)
audioOutput.setSampleBufferDelegate(nil, queue: nil)
startTime = nil
assetWriter?.finishWriting { [weak self] in
guard let self = self else { return }
guard self.assetWriter!.status == .completed else { fatalError("failed recording") }
self.saveToPhotoLibrary { isSaveToPhotoLibrarySucceed in
print("vide saved to photo library")
guard isSaveToPhotoLibrarySucceed else {
print("Save to photo library failed")
return
}
self.saveToRealmFromTempVideo {
self.uploadVideoToServer()
}
}
}
}
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
guard isRecording else { fatalError() }
guard CMSampleBufferDataIsReady(sampleBuffer) else {
print("not ready")
return
}
if startTime == nil {
startTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
assetWriter?.startSession(atSourceTime: startTime!)
}
// Append video or audio
let mediaType: AVMediaType = output is AVCaptureVideoDataOutput ? .video : .audio
if mediaType == .video {
appendVideo(from: sampleBuffer)
} else if mediaType == .audio {
appendAudio(from: sampleBuffer)
} else {
fatalError("should not reach here")
}
}
// Append Video
func appendVideo(from sampleBuffer: CMSampleBuffer) {
// - Guards
guard let videoInput = assetWriter?.inputs.first(where: { $0.mediaType == .video }) else {
print("video input not found")
return
}
guard videoInput.isReadyForMoreMediaData else {
print("video input not ready for more media data")
return
}
// - Timestamp
let sample: Sample = Sample(sampleBuffer: sampleBuffer)
guard let ciImage = generateCIImage(from: sampleBuffer) else {
print("CIImage creation from sampleBuffer failed")
return
}
let uiImage = UIImage(ciImage: ciImage)
guard let timestampAddedImage = self.addTimestamp(on: uiImage) else {
fatalError("should not reach here")
}
guard let timestampAddedCvpixelBuffer = timestampAddedImage.toCVPixelBuffer() else {
print("CVPixelBuffer creation from CIImage failed")
return
}
guard let timestampAddedSampleBuffer = generateCMSampleBuffer(from: timestampAddedCvpixelBuffer, timingInfo: sample.timingInfo) else {
print("CMSampleBuffer creation from CVPixelBufer failed")
return
}
DispatchQueue.main.sync { [weak self] in
self?.compositeImageView.image = timestampAddedImage
}
print("append video")
videoInput.append(timestampAddedSampleBuffer)
}
func addTimestamp(on image: UIImage) -> UIImage? {
let imageRect = CGRect(x: 0, y: 0, width: image.size.width, height: image.size.height)
UIGraphicsBeginImageContextWithOptions(image.size, true, 0.0)
image.draw(in: imageRect)
// Text Attributes
let textColor = UIColor.white
let textFont = UIFont.systemFont(ofSize: FontSize.sizeL, weight: .bold)
let textFontAttributes = [
NSAttributedString.Key.font: textFont,
NSAttributedString.Key.foregroundColor: textColor,
NSAttributedString.Key.backgroundColor: UIColor(hex: ColorConstants.black, alpha: 0.4)
]
let formatter = DateFormatter()
formatter.dateFormat = "yyyy/MM/dd HH:mm:ss:SSS"
let timestamp: NSString = formatter.string(from: Date()) as NSString
let textRect = CGRect(x: 6.0, y: 6.0, width: image.size.width, height: 32)
timestamp.draw(in: textRect, withAttributes: textFontAttributes)
// New Image
let newImage = UIGraphicsGetImageFromCurrentImageContext();
UIGraphicsEndImageContext()
return newImage
}
func generateCMSampleBuffer(from cvPixelBuffer: CVPixelBuffer, timingInfo: CMSampleTimingInfo) -> CMSampleBuffer? {
var sampleBuffer: CMSampleBuffer?
var timimgInfo: CMSampleTimingInfo = timingInfo
var videoInfo: CMVideoFormatDescription!
CMVideoFormatDescriptionCreateForImageBuffer(allocator: nil, imageBuffer: cvPixelBuffer, formatDescriptionOut: &videoInfo)
CMSampleBufferCreateForImageBuffer(allocator: kCFAllocatorDefault,
imageBuffer: cvPixelBuffer,
dataReady: true,
makeDataReadyCallback: nil,
refcon: nil,
formatDescription: videoInfo,
sampleTiming: &timimgInfo,
sampleBufferOut: &sampleBuffer)
return sampleBuffer
}
private extension UIImage {
func toCVPixelBuffer() -> CVPixelBuffer? {
let attrs = [kCVPixelBufferCGImageCompatibilityKey: kCFBooleanTrue, kCVPixelBufferCGBitmapContextCompatibilityKey: kCFBooleanTrue] as CFDictionary
var pixelBuffer : CVPixelBuffer?
let status = CVPixelBufferCreate(kCFAllocatorDefault, Int(self.size.width), Int(self.size.height), kCVPixelFormatType_32ARGB, attrs, &pixelBuffer)
guard status == kCVReturnSuccess else {
return nil
}
if let pixelBuffer = pixelBuffer {
CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
let context = CGContext(data: pixelData, width: Int(self.size.width), height: Int(self.size.height), bitsPerComponent: 8, bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer), space: rgbColorSpace, bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue)
context?.translateBy(x: 0, y: self.size.height)
context?.scaleBy(x: 1.0, y: -1.0)
UIGraphicsPushContext(context!)
self.draw(in: CGRect(x: 0, y: 0, width: self.size.width, height: self.size.height))
UIGraphicsPopContext()
CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: 0))
return pixelBuffer
}
return nil
}
}
private final class Sample {
let timingInfo: CMSampleTimingInfo
init(sampleBuffer: CMSampleBuffer) {
let presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let duration = CMSampleBufferGetDuration(sampleBuffer)
let decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
timingInfo = CMSampleTimingInfo(duration: duration, presentationTimeStamp: presentationTimeStamp, decodeTimeStamp: decodeTimeStamp)
}
}
如果你对这篇内容有疑问,欢迎到本站社区发帖提问 参与讨论,获取更多帮助,或者扫码二维码加入 Web 技术交流群。

绑定邮箱获取回复消息
由于您还没有绑定你的真实邮箱,如果其他用户或者作者回复了您的评论,将不能在第一时间通知您!
更多
发布评论