For some reason my AVCaptureVideoDataOutputSampleBufferDelegate methods aren't being called. I've adopted the delegate protocol and set everything up, but the callbacks never run and I can't figure out why. Can anybody help?
Delegate protocols in my .h:
@class AVPlayer;
@class AVPlayerClass;
@interface Camera : UIViewController <UIImagePickerControllerDelegate, UINavigationControllerDelegate, AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureFileOutputRecordingDelegate> {
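(The ivar block itself isn't shown in the question; judging from the .m code below, it presumably holds something like this, plus a CameraView outlet. The exact types here are my inference, not the original declarations:)

    AVCaptureSession *Session;
    AVCaptureDeviceInput *newVideoInput;
    AVCaptureDevice *newCamera;
    AVCaptureStillImageOutput *stillImageOutput;
    AVCaptureMovieFileOutput *MovieFileOutput;
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferRef VideoBuffer;
}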
.m code (initializeCamera is called from viewDidLoad):
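(For reference, the call site as described, reconstructed rather than copied from the project:)

- (void)viewDidLoad {
    [super viewDidLoad];
    [self initializeCamera];
}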
- (void)initializeCamera {
    Session = [[AVCaptureSession alloc] init];
    [Session setSessionPreset:AVCaptureSessionPresetPhoto];

    AVCaptureDevice *audioCaptureDevice = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeAudio];
    NSError *error = nil;
    AVCaptureDeviceInput *audioInput = [AVCaptureDeviceInput deviceInputWithDevice:audioCaptureDevice error:&error];
    [Session addInput:audioInput];
    // Preview layer ***************
    AVCaptureVideoPreviewLayer *previewLayer = [[AVCaptureVideoPreviewLayer alloc] initWithSession:Session];
    [previewLayer setVideoGravity:AVLayerVideoGravityResizeAspectFill];

    CALayer *rootLayer = [[self view] layer];
    [rootLayer setMasksToBounds:YES];
    CGRect frame = self.CameraView.frame;
    [previewLayer setFrame:frame];
    [rootLayer insertSublayer:previewLayer atIndex:0];
    [Session beginConfiguration];

    // Remove the existing input (a no-op on the first run, when newVideoInput is still nil)
    [Session removeInput:newVideoInput];

    newCamera = [self cameraWithPosition:AVCaptureDevicePositionBack];
    // FrontCamera = NO;

    [Session setSessionPreset:AVCaptureSessionPresetHigh];
    if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
        // Check that size-based configurations are supported before setting them
        [Session setSessionPreset:AVCaptureSessionPreset1920x1080];

    // Add the camera input to the session
    NSError *err = nil;
    newVideoInput = [[AVCaptureDeviceInput alloc] initWithDevice:newCamera error:&err];
    if (!newVideoInput || err) {
        NSLog(@"Error creating capture device input: %@", err.localizedDescription);
    }
    else if ([Session canAddInput:newVideoInput]) {
        [Session addInput:newVideoInput];
    }

    [Session commitConfiguration];
    stillImageOutput = [[AVCaptureStillImageOutput alloc] init];
    NSDictionary *outputSettings = [[NSDictionary alloc] initWithObjectsAndKeys:AVVideoCodecJPEG, AVVideoCodecKey, nil];
    [stillImageOutput setOutputSettings:outputSettings];
    [Session addOutput:stillImageOutput];

    MovieFileOutput = [[AVCaptureMovieFileOutput alloc] init];
    Float64 TotalSeconds = 10;
    int32_t preferredTimeScale = 60;
    CMTime maxDuration = CMTimeMakeWithSeconds(TotalSeconds, preferredTimeScale);
    MovieFileOutput.maxRecordedDuration = maxDuration;
    MovieFileOutput.minFreeDiskSpaceLimit = 1024 * 1024;
    if ([Session canAddOutput:MovieFileOutput])
        [Session addOutput:MovieFileOutput];
    // Create a VideoDataOutput and add it to the session
    // AVCaptureVideoDataOutput *output = [[AVCaptureVideoDataOutput alloc] init];
    // [Session addOutput:output];
    //
    // // Configure your output
    // dispatch_queue_t queue = dispatch_get_main_queue();
    // [output setSampleBufferDelegate:self queue:queue];
    // // dispatch_release(queue);
    //
    // // Specify the pixel format
    // output.videoSettings = [NSDictionary dictionaryWithObject:
    //                         [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
    //                         forKey:(id)kCVPixelBufferPixelFormatTypeKey];
    //
    // AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // [dataOutput setAlwaysDiscardsLateVideoFrames:YES];
    // [dataOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
    //                               forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
    // [dataOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    //
    // if ([Session canAddOutput:dataOutput])
    //     [Session addOutput:dataOutput];
    // // Add to the session
    // [self setupVideoOutput];
    [Session setSessionPreset:AVCaptureSessionPresetHigh];
    if ([Session canSetSessionPreset:AVCaptureSessionPreset1920x1080])
        // Check that size-based configurations are supported before setting them
        [Session setSessionPreset:AVCaptureSessionPreset1920x1080];

    [Session startRunning];
}
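(cameraWithPosition: isn't shown in the question; a typical implementation against this era of the API, my sketch rather than the original helper, looks like this:)

- (AVCaptureDevice *)cameraWithPosition:(AVCaptureDevicePosition)position {
    // Return the first video device at the requested position (front/back)
    for (AVCaptureDevice *device in [AVCaptureDevice devicesWithMediaType:AVMediaTypeVideo]) {
        if (device.position == position) {
            return device;
        }
    }
    return nil;
}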
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"Buff");
    pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(sampleBuffer);
    // Note: the buffer is only valid for the duration of this callback;
    // CVPixelBufferRetain() it if VideoBuffer must outlive this method
    VideoBuffer = pixelBuffer;
}
- (void)captureOutput:(AVCaptureOutput *)captureOutput didDropSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
    NSLog(@"The drop");
}
My code isn't triggering AVCaptureVideoDataOutputSampleBufferDelegate because I am using AVCaptureMovieFileOutput instead of AVCaptureVideoDataOutput. AVCaptureMovieFileOutput writes straight to a file and does not deliver sample buffers to a delegate, and on iOS the two outputs apparently can't be used on the same session at the same time: while a movie file output is attached, a video data output's delegate never fires. As soon as I know how to set up AVCaptureVideoDataOutput correctly to use sample buffers, I will post my code. Hope this helps somebody.
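In the meantime, here is a minimal sketch of what the working setup could look like. It replaces the MovieFileOutput block above; the queue label and the 32BGRA pixel format are my own choices, not something verified against the original project:

    AVCaptureVideoDataOutput *dataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Drop frames that arrive while the delegate is still busy
    dataOutput.alwaysDiscardsLateVideoFrames = YES;
    dataOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };

    // setSampleBufferDelegate:queue: requires a serial queue; a background
    // queue keeps frame handling off the main thread
    dispatch_queue_t videoQueue = dispatch_queue_create("videoQueue", DISPATCH_QUEUE_SERIAL);
    [dataOutput setSampleBufferDelegate:self queue:videoQueue];

    if ([Session canAddOutput:dataOutput]) {
        [Session addOutput:dataOutput];
    }
    // Do NOT also add MovieFileOutput: with it attached, the delegate above never fires on iOS

With that in place (and MovieFileOutput removed), captureOutput:didOutputSampleBuffer:fromConnection: should start logging on every frame. The main queue would also work as the delegate queue, since it is serial, but a dedicated queue avoids blocking UI work.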