GPUImageStillCamera image preview jumps when taking a photo


I am taking a square-cropped photo with GPUImageStillCamera and allowing the user to zoom the camera. When the user taps to take a picture, the live preview jumps forward for a split second, as if the camera zoomed in even further past the area the user had zoomed to, and then it immediately returns to the correct crop once the captured image is put on screen. This only happens when the user has zoomed the camera; if they have not zoomed, the flicker/jump does not happen. (The returned image has the correct crop whether or not the user has zoomed.)

Thoughts?

Creating camera and adding square crop

//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];
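
For reference, with the 1280x720 preset those expressions evaluate to a crop region with origin (0, 0.28125) and size (1, 0.5625) in GPUImage's normalized coordinates, i.e. a full-width band of the portrait frame (720 x 720 pixels of the 720 x 1280 output).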

Image zoom method

-(void)imagePinch:(UIPinchGestureRecognizer *)recognizer { //Controlling the zoom scale as the user pinches the live preview

    if (recognizer.state == UIGestureRecognizerStateBegan) {
        zoomOutAdder = 0.0f;
        if (currentScale > 2) {
            zoomOutAdder = currentScale;
        }
    }

    float addition = (recognizer.scale - lastScale);

    if (addition > 0) {
        addition = addition * 1.7;
    }

    if (addition < 0) {
        addition = addition * (1.7 + zoomOutAdder);
    }

    currentScale = currentScale + addition;
    lastScale = recognizer.scale;

    if (currentScale < 1) {
        currentScale = 1;
    }

    if (currentScale > 4) {
        currentScale = 4;
    }

    if (currentScale == 1) {
        zoomOutAdder = 0.0f;
    }

    cameraImagePreview.transform = CGAffineTransformMakeScale(currentScale, currentScale);

    if (recognizer.state == UIGestureRecognizerStateEnded) {
        lastScale = 1.0f;
    }
}
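
As an example of how the accumulation above plays out: if recognizer.scale has moved from 1.0 to 1.2 since the last callback, addition is 0.2 * 1.7 = 0.34 (the zoom-in branch), so currentScale grows by 0.34 and is then clamped to the 1-4 range before being applied as the preview's scale transform.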

Take a photo method

//Adjust crop based on zoom scale of the user
CGFloat zoomReciprocal = 1.0f / currentScale;
CGPoint offset = CGPointMake((1.0f - zoomReciprocal) / 2.0f, (((1.0f - zoomReciprocal) * (720.0f/1280.0f)) / 2.0f) + ((720.0f/1280.0f) / 2.0f));
CGRect newCrop = cropFilter.cropRegion;
newCrop.origin.x = offset.x;
newCrop.origin.y = offset.y;
newCrop.size.width = cropFilter.cropRegion.size.width * zoomReciprocal;
newCrop.size.height = cropFilter.cropRegion.size.height * zoomReciprocal;
cropFilter.cropRegion = newCrop;
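
For a concrete sense of what that adjustment produces, here are worked values for a hypothetical zoom of 2x (assuming cropRegion still holds the initial region created above):

//Example only: currentScale = 2
//zoomReciprocal = 1.0 / 2.0 = 0.5
//offset.x = (1.0 - 0.5) / 2.0 = 0.25
//offset.y = ((1.0 - 0.5) * 0.5625) / 2.0 + 0.28125 = 0.421875
//newCrop  = {0.25, 0.421875, 0.5, 0.28125}
//i.e. the same center as the unzoomed square crop, at half the size in each dimension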

//Place photo inside an image preview view for the user to decide if they want to keep it.
[stillCamera capturePhotoAsImageProcessedUpToFilter:cropFilter withOrientation:imageOrientation withCompletionHandler:^(UIImage *processedImage, NSError *error) {

    //Pause the current camera
    [stillCamera pauseCameraCapture];

    //Rest of method

Added methods

- (void) flipCamera {

    //flipCamera below refers to the flip-camera button/view ivar (mirrored during the switch), not this method
    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(-1, 1);
        }];
    } else {
        [UIView animateWithDuration:.65 animations:^{
            flipCamera.transform = CGAffineTransformMakeScale(1, 1);
        }];
    }
    [self performSelector:@selector(rotateCamera) withObject:nil afterDelay:.2];
}


- (void) rotateCamera {

    [stillCamera rotateCamera];

    //Adjust flash settings as needed
    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.cameraPosition != AVCaptureDevicePositionFront) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
    }

    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:@"off"
     attributes:
     @{
       NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
       NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
       NSKernAttributeName : @(.25f)
       }];
    flashLabel.attributedText = attributedFlash;

    [UIView animateWithDuration:.2 animations:^{
        [flash setTintColor:[UIColor colorWithWhite:1 alpha:.55]];
    }];

    [stillCamera.inputCamera unlockForConfiguration];
}



- (void) changeFlash {

    if (stillCamera.cameraPosition == AVCaptureDevicePositionFront) { //no flash available on the front camera
        return;
    }

    [stillCamera.inputCamera lockForConfiguration:nil];
    if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOff) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOn];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:1] andString:@"on"];

    } else if (stillCamera.inputCamera.flashMode == AVCaptureFlashModeOn) {
        [stillCamera.inputCamera setFlashMode:AVCaptureFlashModeOff];
        [self animateFlashWithTintColor:[UIColor colorWithWhite:1 alpha:.55] andString:@"off"];
    }

    [stillCamera.inputCamera unlockForConfiguration];
}



- (void) animateFlashWithTintColor:(UIColor *)color andString:(NSString *)text {

    //Set new text
    NSAttributedString *attributedFlash =
    [[NSAttributedString alloc]
     initWithString:text
     attributes:
     @{
       NSFontAttributeName : [UIFont fontWithName:@"Roboto-Regular" size:13.0f],
       NSForegroundColorAttributeName : [UIColor colorWithWhite:1 alpha:.55],
       NSKernAttributeName : @(.25f)
       }];
    flashLabel.attributedText = attributedFlash;

    float duration = .7;

    [UIView animateKeyframesWithDuration:duration delay:0 options:0 animations:^{
        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:duration animations:^{
            [flash setTintColor:color];
        }];

        [UIView addKeyframeWithRelativeStartTime:0 relativeDuration:.7/duration animations:^{
            flash.transform = CGAffineTransformMakeRotation(M_PI);
        }];

    } completion:^(BOOL finished){
        flash.transform = CGAffineTransformIdentity;
    }];
}


-(void) usePhoto {

    if ([ALAssetsLibrary authorizationStatus] != ALAuthorizationStatusAuthorized) {
        NSLog(@"Do Not Have Right To Save to Photo Library");
    }

    //Save the image to the photo album
    UIImageWriteToSavedPhotosAlbum(takenPhoto.image, nil, nil, nil);

    //Pass the image to the delegate
    [self.delegate saveImageToDatabase:takenPhoto.image];
    [self performSelector:@selector(dismissCamera) withObject:nil afterDelay:.4];
}

Some additional code showing the creation of the various camera elements used to capture a photo.

centerPoint = CGPointMake(self.view.frame.size.width/2, (cameraHolder.frame.size.height+50+self.view.frame.size.height)/2);
cameraImagePreview = [[GPUImageView alloc] initWithFrame:CGRectMake(0, 0, cameraHolder.frame.size.width, cameraHolder.frame.size.width)];
[cameraHolder addSubview:cameraImagePreview];
UITapGestureRecognizer *tapGesture = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(imageTouch:)];
[cameraImagePreview addGestureRecognizer:tapGesture];
UIPinchGestureRecognizer *pinchGesture = [[UIPinchGestureRecognizer alloc] initWithTarget:self action:@selector(imagePinch:)];
[cameraImagePreview addGestureRecognizer:pinchGesture];


float scaleForView = self.view.frame.size.width/720.0;
fullCameraFocusPoint = [[UIView alloc]initWithFrame:CGRectMake(0, 0, self.view.frame.size.width, 1280*scaleForView)];
fullCameraFocusPoint.center = CGPointMake(cameraHolder.frame.size.width/2, (cameraHolder.frame.size.width/2)+50);
[self.view insertSubview:fullCameraFocusPoint atIndex:0];

takenPhoto = [[UIImageView alloc]initWithFrame:cameraHolder.frame];
takenPhoto.alpha = 0;
[self.view addSubview:takenPhoto];


//Add in filters
stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;

//Creating a square crop filter
cropFilter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.f, (720.0f/1280.0f)/2.0f, 1.f, (720.0f/1280.0f))];

//Create standard vignette filter
vignetteFilter = [[GPUImageVignetteFilter alloc] init];
vignetteFilter.vignetteCenter = CGPointMake(.5, .5);
vignetteFilter.vignetteStart = 0.4f;
vignetteFilter.vignetteEnd = 1.08f;

//Add filters to photo
[cropFilter addTarget:vignetteFilter];
[stillCamera addTarget:cropFilter];
[vignetteFilter addTarget:cameraImagePreview];
[stillCamera startCameraCapture];
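
With this setup the live preview chain is stillCamera -> cropFilter -> vignetteFilter -> cameraImagePreview, while the photo capture earlier in the question is processed only up to cropFilter, so the vignette shows in the preview but not in the captured image.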