I have a problem with the `captureOutput:didOutputSampleBuffer:fromConnection:` method not being called while the session is running. I tried adding an NSLog to it, but it seems the method is never invoked. Moreover, I searched for a solution but didn't find one.
#import <AVFoundation/AVFoundation.h>
#import <UIKit/UIImage.h>
// Simple camera grabber: conforms to the video-data-output delegate protocol
// so AVFoundation can deliver raw sample buffers to this object.
@interface Cam : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
// Delegate callback AVFoundation invokes once per captured video frame,
// on the dispatch queue passed to setSampleBufferDelegate:queue:.
-(void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)buffer
fromConnection:(AVCaptureConnection *)connection;
@end
// Class extension: control surface for starting/stopping the capture
// session and pulling the most recent frame as JPEG data.
@interface Cam ()
{
}
// Opens the camera at deviceIndex and starts the session; returns NO on failure.
-(BOOL)start: (int)deviceIndex;
// Stops the running capture session; no frames are delivered afterwards.
-(void)stop;
// Blocks until a few frames have arrived, then returns the newest one as JPEG,
// or nil if no frame was ever delivered.
-(NSData *)getFrame;
@end
@implementation Cam

// File-scope state (not ivars): the original design supports a single
// shared Cam instance, referenced through the global `cam` by the caller.
Cam *cam;                  // shared instance, assigned by the caller
CVImageBufferRef head;     // newest captured frame (retained); guarded by @synchronized(self)
AVCaptureSession *session; // active capture session
int count;                 // frames delivered so far; written on the capture queue

/// Initializes the shared frame state.
-(id)init
{
    self = [super init];
    if (self)
    {
        head = nil;
        count = 0;
    }
    return self;
}

/// Releases the last retained frame; ARC handles everything else.
-(void)dealloc
{
    @synchronized (self)
    {
        if (head != nil)
        {
            CFRelease(head);
            head = nil;
        }
    }
}

/// Opens the camera at deviceIndex and starts streaming frames to
/// captureOutput:didOutputSampleBuffer:fromConnection:.
/// Returns NO when camera access is not authorized, the index is out of
/// range, or the session cannot be configured.
-(BOOL)start:(int)deviceIndex
{
    NSArray *devices;
    AVCaptureDeviceDiscoverySession *discoverySession;
    AVCaptureDevice *device;
    AVCaptureDeviceInput *input;
    AVCaptureVideoDataOutput *output;
    NSError *error;
    dispatch_queue_t queue;
    AVAuthorizationStatus status;

    // Most common reason captureOutput:... is "never called": the app was
    // never granted camera access, so startRunning succeeds but no frames
    // are ever delivered. Info.plist must also contain an
    // NSCameraUsageDescription entry, otherwise the process is terminated
    // on the first capture attempt.
    status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    if (status == AVAuthorizationStatusNotDetermined)
    {
        // Ask once and wait for the user's answer before configuring.
        dispatch_semaphore_t sem = dispatch_semaphore_create(0);
        [AVCaptureDevice requestAccessForMediaType:AVMediaTypeVideo
                                 completionHandler:^(BOOL granted) {
            dispatch_semaphore_signal(sem);
        }];
        dispatch_semaphore_wait(sem, DISPATCH_TIME_FOREVER);
        status = [AVCaptureDevice authorizationStatusForMediaType:AVMediaTypeVideo];
    }
    if (status != AVAuthorizationStatusAuthorized)
    {
        log_debug("* Camera access not authorized\n");
        return NO;
    }

    session = [[AVCaptureSession alloc] init];
    session.sessionPreset = AVCaptureSessionPresetMedium;
    discoverySession = [AVCaptureDeviceDiscoverySession
        discoverySessionWithDeviceTypes:@[AVCaptureDeviceTypeBuiltInWideAngleCamera]
                              mediaType:AVMediaTypeVideo
                               position:AVCaptureDevicePositionUnspecified];
    devices = discoverySession.devices;
    // Cast avoids a signed/unsigned comparison against NSUInteger count.
    if (deviceIndex < 0 || deviceIndex >= (int)[devices count])
    {
        log_debug("* Failed to open device (%d)\n", deviceIndex);
        return NO;
    }
    device = devices[deviceIndex];
    input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!input)
    {
        log_debug("* Failed to capture input (%s)\n", [error.localizedDescription UTF8String]);
        return NO;
    }
    // Always verify the session accepts the input/output before adding;
    // adding an unacceptable one raises an exception.
    if (![session canAddInput:input])
    {
        log_debug("* Session refused the device input\n");
        return NO;
    }
    [session addInput:input];

    output = [[AVCaptureVideoDataOutput alloc] init];
    [output setAlwaysDiscardsLateVideoFrames:YES];
    [output setVideoSettings:@{(NSString *)kCVPixelBufferPixelFormatTypeKey:@(kCVPixelFormatType_32BGRA)}];
    // Frames are delivered on this serial queue; no run loop is required.
    queue = dispatch_queue_create("cam_queue", DISPATCH_QUEUE_SERIAL);
    [output setSampleBufferDelegate:self queue:queue];
    if (![session canAddOutput:output])
    {
        log_debug("* Session refused the video output\n");
        return NO;
    }
    [session addOutput:output];

    [session startRunning];
    return YES;
}

/// Stops the capture session.
-(void)stop
{
    [session stopRunning];
}

/// Waits up to ~5 seconds (500 * 10 ms) for a handful of frames so the
/// sensor has settled, then converts the newest frame to JPEG.
/// Returns nil when no frame was ever delivered.
-(NSData *)getFrame
{
    int timer;
    CIImage *ciImage;
    CIContext *temporaryContext;
    CGImageRef videoImage;
    UIImage *uiImage;
    NSData *frame;

    // `count` is only ever incremented, so a stale unsynchronized read
    // here just costs one extra poll iteration.
    for (timer = 0; timer < 500; timer++)
    {
        if (count > 5)
        {
            break;
        }
        usleep(10000);
    }
    @synchronized (self)
    {
        if (head == nil)
        {
            log_debug("* Head is somehow nil (count: %d)\n", count);
            return nil;
        }
        // Orientation 6 rotates 90 degrees, so width/height swap in the rect.
        ciImage = [[CIImage imageWithCVPixelBuffer:head] imageByApplyingOrientation:6];
        temporaryContext = [CIContext contextWithOptions:nil];
        videoImage = [temporaryContext createCGImage:ciImage
                                            fromRect:CGRectMake(0, 0,
                                                                CVPixelBufferGetHeight(head),
                                                                CVPixelBufferGetWidth(head))];
        uiImage = [[UIImage alloc] initWithCGImage:videoImage];
        frame = UIImageJPEGRepresentation(uiImage, 1.0);
        CGImageRelease(videoImage);
        return frame;
    }
    return nil;
}

/// AVFoundation delegate: retains the newest frame and releases the one it
/// replaces. Runs on the capture queue, never on the main thread.
-(void)captureOutput:(AVCaptureOutput *)output
didOutputSampleBuffer:(CMSampleBufferRef)buffer
fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef frame;
    CVImageBufferRef prev;

    frame = CMSampleBufferGetImageBuffer(buffer);
    if (frame == NULL)
    {
        // Not every sample buffer carries pixel data; CFRetain(NULL) crashes.
        return;
    }
    CFRetain(frame);
    @synchronized (self)
    {
        prev = head;
        head = frame;
        count++;
    }
    if (prev != nil)
    {
        CFRelease(prev);
    }
}
@end
So, what I do is basically:
...
NSData *frame;
// Pool scopes the autoreleased image objects created while grabbing a frame.
@autoreleasepool
{
// Create the shared camera object (assigned to the file-scope `cam` global
// declared in the Cam implementation) and open device camID.
cam = [[Cam alloc] init];
if ([cam start:camID])
{
// Blocks until frames have arrived (or ~5 s timeout), then returns JPEG data.
frame = [cam getFrame];
if (frame == nil)
log_debug("* Frame is somehow nil?\n");
}
else
{
// Start failed: drop the instance so ARC releases it.
cam = nil;
}
}
...
As a result I get `* Frame is somehow nil?` and `* Head is somehow nil (count: 0)` in the debug log, which means the delegate callback never incremented the frame count.
I don't know what else I can do or fix. Please help me with this issue.
Thanks in advance