The goal: Create a VideoRecordingManager. It will be responsible for presenting the live preview, capturing the video with audio, and informing the relevant VC via a delegate method that a frame was captured and can be processed.
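For completeness, these are the supporting types the manager talks to, trimmed down to just what the code below actually uses (the AnyObject constraint is only there so the delegate can be held weakly):

protocol VideoRecordingDelegate: AnyObject {
    func livePreviewStartedShowing()
    func livePreviewStoppedShowing()
    /** Called for every captured frame so the VC can run its own processing on the pixel buffer */
    func analyzeVideoFrameOutput(with pixelBuffer: CVPixelBuffer)
}

enum MediaPermissionType {
    case video, audio

    func asMediaType() -> AVMediaType {
        switch self {
        case .video: return .video
        case .audio: return .audio
        }
    }
}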
The issue: captureOutput from AVCaptureVideoDataOutputSampleBufferDelegate is not being called.
Current status: The live preview is working as it should; I can see what's being captured from my front camera on the previewView. But the breakpoint inside captureOutput() is never hit.
However (!) - if I comment out everything related to audioInput and audioDataOutput in configureSessionVideoAndAudio(), captureOutput does get called!
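In case it matters, this is roughly how the manager is driven from the owning view controller (reconstructed from the calls below; simplified, error handling omitted):

// In the owning view controller (simplified):
let manager = OAVideoRecordingManager(previewView: previewView, delegate: self)
try manager.setupRecordingManager()    // configures the audio session and the asset writer
try manager.startShowingLivePreview()  // builds the capture session and starts it running
// note: startShowingLivePreview() silently does nothing if camera/mic permission isn't granted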
My code:
import AVFoundation
import UIKit

class OAVideoRecordingManager: NSObject,
                               AVCaptureVideoDataOutputSampleBufferDelegate,
                               AVCaptureAudioDataOutputSampleBufferDelegate,
                               AVCaptureDataOutputSynchronizerDelegate {
    weak var delegate: VideoRecordingDelegate? // weak to avoid a retain cycle with the owning VC

    // MARK: - Live Preview

    /** The UIView on which the live preview of the video will be shown to the user */
    var previewView: UIView
    /** Flag representing whether we are currently presenting the live preview to the user */
    fileprivate var isShowingLivePreview = false
    /** The AVCaptureVideoPreviewLayer that will be added on top of previewView */
    fileprivate var previewLayer: AVCaptureVideoPreviewLayer?

    // MARK: - Voice

    fileprivate var audioSharedSession: AVAudioSession?
    fileprivate var urlAudioFile: URL?

    // MARK: - Video

    /** Flag responsible for starting and stopping the capturing. Represents the capturing status - on or off */
    fileprivate var isRecordingVideo: Bool = false
    /** Handles the media capture */
    fileprivate var captureSession: AVCaptureSession?
    fileprivate var videoWriter: AVAssetWriter!
    fileprivate var videoWriterInput: AVAssetWriterInput!
    fileprivate var audioWriterInput: AVAssetWriterInput!
    /** The video data output of the recording session */
    fileprivate lazy var videoDataOutput = AVCaptureVideoDataOutput()
    /** The audio data output of the recording session */
    fileprivate lazy var audioDataOutput = AVCaptureAudioDataOutput()
    /** Synchronizes the video & audio data outputs, ensuring the data remains synchronized */
    var outputSynch: AVCaptureDataOutputSynchronizer!
    /** Timestamp used to synchronize the video frames with the corresponding audio samples */
    fileprivate var sessionAtSourceTime: CMTime?

    private var _filename = ""
    init(previewView: UIView, delegate: VideoRecordingDelegate) {
        self.previewView = previewView
        self.delegate = delegate
        super.init()
    }

    func setupRecordingManager() throws {
        try setupVoice()
        try setupWriter()
        sessionAtSourceTime = nil
    }
    /** Set up the URL for an audio file within NSTemporaryDirectory and configure the shared AVAudioSession for recording */
    fileprivate func setupVoice() throws {
        urlAudioFile = URL(fileURLWithPath: NSTemporaryDirectory()).appendingPathComponent("ix_audio.wav")
        audioSharedSession = AVAudioSession.sharedInstance()
        guard let audioSharedSession else {
            print("Error: Unable to access the shared audio session.")
            throw NSError(domain: "OAVideoRecordingManager", code: -1)
        }
        try audioSharedSession.setCategory(.record)
    }
    /** Initialize the AVAssetWriter with configured video and audio inputs and start the writing process.
     Does NOT actually start capturing, just setup. */
    fileprivate func setupWriter() throws {
        videoWriter = nil
        // Generate a unique filename
        _filename = UUID().uuidString
        let videoPath = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!.appendingPathComponent("\(_filename).mp4")
        videoWriter = try AVAssetWriter(url: videoPath, fileType: AVFileType.mp4)

        // Add video input
        let videoOutputSettings: [String: Any] = [
            AVVideoCodecKey: AVVideoCodecType.h264,
            AVVideoWidthKey: previewView.bounds.width,
            AVVideoScalingModeKey: AVVideoScalingModeResizeAspectFill,
            AVVideoHeightKey: previewView.bounds.height,
            AVVideoCompressionPropertiesKey: [
                AVVideoAverageBitRateKey: 2300000,
            ],
        ]
        videoWriterInput = AVAssetWriterInput(mediaType: .video, outputSettings: videoOutputSettings)
        videoWriterInput.mediaTimeScale = CMTimeScale(bitPattern: 600)
        videoWriterInput.expectsMediaDataInRealTime = true
        if videoWriter.canAdd(videoWriterInput) {
            videoWriter.add(videoWriterInput)
        }

        // Add audio input
        let audioOutputSettings: [String: Any] = [
            AVFormatIDKey: kAudioFormatMPEG4AAC,
            AVNumberOfChannelsKey: 1,
            AVSampleRateKey: 44100,
            AVEncoderBitRateKey: 64000,
        ]
        audioWriterInput = AVAssetWriterInput(mediaType: .audio, outputSettings: audioOutputSettings)
        audioWriterInput.expectsMediaDataInRealTime = true
        if videoWriter.canAdd(audioWriterInput) {
            videoWriter.add(audioWriterInput)
        }

        videoWriter.startWriting() // Ready to start writing the file when ordered to
    }
    @MainActor
    fileprivate func initiateCaptureSession() throws {
        if captureSession == nil {
            captureSession = AVCaptureSession()
            if captureSession!.canSetSessionPreset(.hd1280x720) {
                captureSession!.sessionPreset = .hd1280x720
            } else {
                throw NSError(domain: "OAVideoRecordingManager", code: -2)
            }
        }
        try configureSessionVideoAndAudio()
        captureSession?.startRunning()
    }
    /** Set up the capture session with audio and video data outputs, set their delegates,
     and initialize a synchronizer for the data outputs */
    @MainActor
    fileprivate func configureSessionVideoAndAudio() throws {
        guard let audioDevice = AVCaptureDevice.default(for: .audio),
              let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
              let session = captureSession
        else {
            throw NSError(domain: "OAVideoRecordingManager", code: -3)
        }
        // Disable automatic audio session configuration so we can configure our own
        session.automaticallyConfiguresApplicationAudioSession = false
        session.beginConfiguration()

        // Add input devices -
        // Add the microphone to the session
        let audioInput = try AVCaptureDeviceInput(device: audioDevice)
        if session.canAddInput(audioInput) {
            session.addInput(audioInput)
        }
        // Add the camera to the session
        let videoInput = try AVCaptureDeviceInput(device: videoDevice)
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }

        // Add outputs -
        // Set up the video data output; both data outputs share one serial callback queue
        let queue = DispatchQueue(label: "com.cvcamrecorder.record-video.data-output")
        videoDataOutput.alwaysDiscardsLateVideoFrames = false
        if session.canAddOutput(videoDataOutput) {
            videoDataOutput.setSampleBufferDelegate(self, queue: queue)
            session.addOutput(videoDataOutput)
            videoDataOutput.connection(with: .video)?.videoOrientation = .portrait
        }
        // Set up the audio data output
        if session.canAddOutput(audioDataOutput) {
            audioDataOutput.setSampleBufferDelegate(self, queue: queue)
            session.addOutput(audioDataOutput)
        }

        // Commit the configuration
        session.commitConfiguration()

        // Initialize the synchronizer after adding the outputs to the session
        if session.outputs.contains(audioDataOutput) && session.outputs.contains(videoDataOutput) {
            outputSynch = AVCaptureDataOutputSynchronizer(dataOutputs: [videoDataOutput, audioDataOutput])
            outputSynch.setDelegate(self, queue: queue)
        }
    }
    @MainActor
    @objc func startShowingLivePreview() throws {
        if !isShowingLivePreview && isHavePermission(for: .video) && isHavePermission(for: .audio) {
            try initiateCaptureSession()
            setupVideoPreviewLayer()
            isShowingLivePreview = true
            delegate?.livePreviewStartedShowing()
        }
    }

    private func setupVideoPreviewLayer() {
        guard let captureSession else { return }
        let previewLayerRect = previewView.bounds
        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        previewLayer!.videoGravity = .resizeAspectFill
        previewLayer!.bounds = previewLayerRect
        previewLayer!.position = CGPoint(x: previewLayerRect.midX, y: previewLayerRect.midY)
        previewLayer!.connection?.videoOrientation = .portrait
        previewView.layer.addSublayer(previewLayer!)
    }

    @objc func stopShowingLivePreview() {
        if isShowingLivePreview {
            isShowingLivePreview = false
            previewLayer?.removeFromSuperlayer()
            captureSession?.stopRunning()
            captureSession = nil
            delegate?.livePreviewStoppedShowing()
        }
    }
    /** A delegate (AVCaptureVideoDataOutputSampleBufferDelegate) method that is called whenever a new video FRAME is captured, to allow analysis of the frame if needed.
     This function also starts the video writer's session so the final video can be created. */
    public func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        if CMSampleBufferDataIsReady(sampleBuffer) {
            if let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
                if isRecordingVideo {
                    // Capture video and audio in one
                    if sessionAtSourceTime == nil {
                        // Start writing
                        sessionAtSourceTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
                        videoWriter.startSession(atSourceTime: sessionAtSourceTime!)
                    }
                }
                delegate?.analyzeVideoFrameOutput(with: pixelBuffer)
            }
        }
    }
    /** A delegate method that is called whenever synchronized data is captured.
     Checks the synchronized data, makes sure it wasn't dropped, and appends it to the writer input */
    func dataOutputSynchronizer(_ synchronizer: AVCaptureDataOutputSynchronizer,
                                didOutput synchronizedDataCollection: AVCaptureSynchronizedDataCollection) {
        guard isRecordingVideo, let _ = sessionAtSourceTime else { return }
        processSynchronizedData(synchronizedDataCollection,
                                for: videoDataOutput,
                                using: &videoWriterInput,
                                debugName: "video")
        processSynchronizedData(synchronizedDataCollection,
                                for: audioDataOutput,
                                using: &audioWriterInput,
                                debugName: "audio")
    }

    private func processSynchronizedData(_ synchronizedDataCollection: AVCaptureSynchronizedDataCollection,
                                         for dataOutput: AVCaptureOutput,
                                         using writerInput: inout AVAssetWriterInput,
                                         debugName: String) {
        guard let syncedData = synchronizedDataCollection.synchronizedData(for: dataOutput) as? AVCaptureSynchronizedSampleBufferData else { return }
        guard !syncedData.sampleBufferWasDropped else {
            print("Dropped \(debugName) data")
            return
        }
        let sampleBuffer = syncedData.sampleBuffer
        if isRecordingVideo && writerInput.isReadyForMoreMediaData {
            writerInput.append(sampleBuffer)
        }
    }
    func isHavePermission(for type: MediaPermissionType) -> Bool {
        let status = AVCaptureDevice.authorizationStatus(for: type.asMediaType())
        switch status {
        case .authorized:
            return true
        default:
            return false
        }
    }
}
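To make the "comment out the audio parts" observation concrete: with the session configuration reduced to video only, like this, the breakpoint in captureOutput IS hit on every frame (the method name configureSessionVideoOnly is just for illustration):

    // Video-only variant of configureSessionVideoAndAudio() - with this, captureOutput IS called
    @MainActor
    fileprivate func configureSessionVideoOnly() throws {
        guard let videoDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .front),
              let session = captureSession
        else {
            throw NSError(domain: "OAVideoRecordingManager", code: -3)
        }
        session.beginConfiguration()

        let videoInput = try AVCaptureDeviceInput(device: videoDevice)
        if session.canAddInput(videoInput) {
            session.addInput(videoInput)
        }

        let queue = DispatchQueue(label: "com.cvcamrecorder.record-video.data-output")
        if session.canAddOutput(videoDataOutput) {
            videoDataOutput.setSampleBufferDelegate(self, queue: queue)
            session.addOutput(videoDataOutput)
            videoDataOutput.connection(with: .video)?.videoOrientation = .portrait
        }

        session.commitConfiguration()
        // Note: with the audio output gone, the contains(audioDataOutput) check above is never
        // satisfied, so no AVCaptureDataOutputSynchronizer is created in this variant.
    }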
I've already gone through these threads:
1- captureOutput from AVCaptureVideoDataOutputSampleBufferDelegate is not being called
2- captureOutput function isn't called using setSampleBufferDelegate
3- captureOutput not being called by AVCaptureAudioDataOutputSampleBufferDelegate
4- captureOutput() function is never called swift4
Nothing solves my issue.
Would appreciate any help!