How to save an array of CGImages to an MP4 file


I have an array of CGImages that I get from an AVCaptureSession, and I've been trying to turn this array into an MP4 or MOV video. Every time I run the code I get the error "Error writing video: The operation could not be completed Underlying error: The operation couldn’t be completed. (OSStatus error -12780.)" I think it's about the pixel formats not matching. This is my code for saving the video:

func createMP4FromCGImages(cgImageArray: [CGImage], frameWidth: Int, frameHeight: Int, framesPerSecond: Int, outputURL: URL) {
    do {
        let pixelFormatType = kCVPixelFormatType_32BGRA
        let colorSpace = CGColorSpaceCreateDeviceRGB()

        let videoSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: frameWidth,
            AVVideoHeightKey: frameHeight,
            AVVideoCompressionPropertiesKey: [
                AVVideoAverageBitRateKey: 2000000, // 2 Mbps average bit rate
                AVVideoProfileLevelKey: AVVideoProfileLevelH264High40, // H.264 high profile, level 4.0
            ],
            AVVideoColorPropertiesKey: [
                AVVideoColorPrimariesKey: AVVideoColorPrimaries_ITU_R_709_2,
                AVVideoTransferFunctionKey: AVVideoTransferFunction_ITU_R_709_2,
                AVVideoYCbCrMatrixKey: AVVideoYCbCrMatrix_ITU_R_709_2,
            ],
        ]

        let assetWriter = try AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mov)
        let videoInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoSettings)

        let pixelBufferAttributes: [String: Any] = [
            kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA),
        ]

        // Create an AVAssetWriterInputPixelBufferAdaptor and associate it with your asset writer input
        let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: videoInput, sourcePixelBufferAttributes: pixelBufferAttributes)

        if assetWriter.canAdd(videoInput) {
            assetWriter.add(videoInput)
        } else {
            print("Cannot add video input to asset writer.")
            return
        }

        assetWriter.startWriting()
        assetWriter.startSession(atSourceTime: .zero)

        var currentPresentationTime = CMTime.zero

        for image in cgImageArray {
            autoreleasepool {
                let sampleBuffer = createSampleBufferFromCGImage(image, presentationTime: currentPresentationTime, frameRate: framesPerSecond)

                if let sampleBuffer = sampleBuffer {
                    if videoInput.isReadyForMoreMediaData {
                        videoInput.append(sampleBuffer)
                        let frameDuration = CMTimeMake(value: 1, timescale: Int32(framesPerSecond))
                        currentPresentationTime = CMTimeAdd(currentPresentationTime, frameDuration)
                    } else {
                        print("Video input not ready for more data.")
                        // Handle this situation as needed.
                    }
                } else {
                    print("Error creating sample buffer.")
                    // Handle this situation as needed.
                }
            }
        }

        assetWriter.finishWriting {
            if let error = assetWriter.error as NSError? {
                print("Error writing video: \(error.localizedDescription)")

                if let underlyingError = error.userInfo[NSUnderlyingErrorKey] as? NSError {
                    print("Underlying error: \(underlyingError.localizedDescription)")
                }
            } else {
                print("Video saved successfully at \(outputURL)")
            }
        }
    } catch {
        print("Error creating AVAssetWriter: \(error)")
    }
}

func createSampleBufferFromCGImage(_ image: CGImage, presentationTime: CMTime, frameRate: Int) -> CMSampleBuffer? {
    var pixelBuffer: CVPixelBuffer?
    let options: [String: Any] = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true,
    ]

    let status = CVPixelBufferCreate(kCFAllocatorDefault, image.width, image.height, kCVPixelFormatType_32BGRA, options as CFDictionary, &pixelBuffer)

    guard status == kCVReturnSuccess, let buffer = pixelBuffer else {
        return nil
    }

    CVPixelBufferLockBaseAddress(buffer, [])
    let data = CVPixelBufferGetBaseAddress(buffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGContext(
        data: data,
        width: image.width,
        height: image.height,
        bitsPerComponent: 8,
        bytesPerRow: CVPixelBufferGetBytesPerRow(buffer),
        space: rgbColorSpace,
        bitmapInfo: CGImageAlphaInfo.noneSkipFirst.rawValue
    )

    context?.draw(image, in: CGRect(x: 0, y: 0, width: image.width, height: image.height))

    var timingInfo = CMSampleTimingInfo(
        duration: CMTimeMake(value: 1, timescale: Int32(frameRate)),
        presentationTimeStamp: presentationTime,
        decodeTimeStamp: .invalid
    )

    var sampleBuffer: CMSampleBuffer?

    // Create a format description separately
    var formatDescription: CMVideoFormatDescription?
    CMVideoFormatDescriptionCreateForImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: buffer,
        formatDescriptionOut: &formatDescription
    )

    CMSampleBufferCreateReadyWithImageBuffer(
        allocator: kCFAllocatorDefault,
        imageBuffer: buffer,
        formatDescription: formatDescription!,
        sampleTiming: &timingInfo,
        sampleBufferOut: &sampleBuffer
    )

    CVPixelBufferUnlockBaseAddress(buffer, [])

    return sampleBuffer
}
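
Note that the pixelBufferAdaptor above is created but never used; the frames are appended as hand-built sample buffers instead. For reference, appending through the adaptor would look roughly like the sketch below (pixelBufferFromCGImage is a hypothetical helper that returns just the CVPixelBuffer half of createSampleBufferFromCGImage). I haven't confirmed whether switching to it avoids the error.

// Sketch only: append CVPixelBuffers through the adaptor instead of building CMSampleBuffers by hand.
for image in cgImageArray {
    autoreleasepool {
        guard let pixelBuffer = pixelBufferFromCGImage(image) else { return } // hypothetical helper
        while !videoInput.isReadyForMoreMediaData {
            Thread.sleep(forTimeInterval: 0.01) // crude back-pressure wait
        }
        pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: currentPresentationTime)
        currentPresentationTime = CMTimeAdd(currentPresentationTime, CMTimeMake(value: 1, timescale: Int32(framesPerSecond)))
    }
}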

My AVCaptureSession is set up to deliver frames in the kCVPixelFormatType_32BGRA format: AVCaptureVideoDataOutput.withPixelFormatType(kCVPixelFormatType_32BGRA)
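
(withPixelFormatType is a small convenience helper; a minimal sketch of what I assume it boils down to in plain AVFoundation:)

// Assumption: the helper simply asks the data output for BGRA frames.
let videoDataOutput = AVCaptureVideoDataOutput()
videoDataOutput.videoSettings = [
    kCVPixelBufferPixelFormatTypeKey as String: NSNumber(value: kCVPixelFormatType_32BGRA)
]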

I've tried checking all the pixel formats in the debugger, but everything seems to line up with the inputs.
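
The kind of check I mean, done via the debugger but equivalent to printing the formats in the capture delegate, is roughly this sketch:

// Inside captureOutput(_:didOutput:from:) — sampleBuffer is the delegate's CMSampleBuffer parameter.
if let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
    let format = CVPixelBufferGetPixelFormatType(imageBuffer)
    print("capture format:", format, "expected:", kCVPixelFormatType_32BGRA)
}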
