I'm trying to understand what I'm doing wrong in my project. I'm trying to draw a box over a detected face using the Vision framework.
I first set up the back camera with the following method:
func configureSession() {
    // Check whether we got authorization to use the camera, otherwise return.
    if setupResult != .success { return }
    var defaultVideoDevice: AVCaptureDevice?
    session.beginConfiguration() // so we can change the configuration
    session.sessionPreset = .vga640x480 // Model image size is smaller.
    do {
        // Select the best device to use as input.
        if let wideAngleDevice = AVCaptureDevice.default(.builtInWideAngleCamera, for: .video, position: .back) {
            print("selected input builtInWideAngleCamera")
            defaultVideoDevice = wideAngleDevice
        }
        guard let defaultVideoDevice = defaultVideoDevice else {
            print("error: cannot find any camera in configureSession")
            return
        }
        let videoDeviceInput = try AVCaptureDeviceInput(device: defaultVideoDevice)
        // Add the input to the session.
        if session.canAddInput(videoDeviceInput) {
            session.addInput(videoDeviceInput)
            self.videoDeviceInput = videoDeviceInput
        } else {
            print("Could not add video device input to the session")
            setupResult = .configurationFailed
            session.commitConfiguration()
            return
        } // end add input
    } catch {
        print("Could not set input device on the session, error: \(error.localizedDescription)")
        setupResult = .configurationFailed
        session.commitConfiguration()
        return
    }
    // ----- Add the output.
    if session.canAddOutput(videoDataOutput) {
        session.addOutput(videoDataOutput)
        // Configure the video data output.
        videoDataOutput.alwaysDiscardsLateVideoFrames = true
        videoDataOutput.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: Int(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)]
        videoDataOutput.setSampleBufferDelegate(self, queue: sessionQueue)
    } else {
        print("Could not add video data output to the session")
        session.commitConfiguration()
        return
    }
    guard let captureConnection = videoDataOutput.connection(with: .video) else { return }
    captureConnection.videoOrientation = .portrait // <-- DO I NEED TO CHANGE THIS?? ----------
    captureConnection.isEnabled = true
    if captureConnection.isVideoOrientationSupported {
        print("capture connection orientation rawValue: \(captureConnection.videoOrientation.rawValue) (3 would be landscapeRight)")
    }
    // Read the buffer size from the device's active format.
    do {
        try defaultVideoDevice!.lockForConfiguration()
        let dimensions = CMVideoFormatDescriptionGetDimensions(defaultVideoDevice!.activeFormat.formatDescription)
        bufferSize.width = CGFloat(dimensions.width)
        bufferSize.height = CGFloat(dimensions.height)
        defaultVideoDevice!.unlockForConfiguration()
    } catch {
        print("get the buffer size ERROR \(error.localizedDescription)")
    }
    let tapGesture = UITapGestureRecognizer(target: self, action: #selector(tapAction))
    cameraView.addGestureRecognizer(tapGesture)
    // Set up the preview layer.
    cameraView.videoPreviewLayer.videoGravity = AVLayerVideoGravity.resizeAspectFill
    session.commitConfiguration()
    cameraView.session = session
    rootLayer = cameraView.videoPreviewLayer
    guard let conn = self.cameraView.videoPreviewLayer.connection else { return }
    print("cameraView connection video orientation \(conn.videoOrientation.rawValue)")
}
First question: how should I set captureConnection.videoOrientation? I can't figure out how it needs to be set; my plan is to use the phone in both portrait and landscape.
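For context, if the connection does have to follow the device, this is roughly the rotation handler I was planning to call from viewWillTransition(to:with:) (just a sketch; the helper name is mine and I haven't verified it fixes anything):

// Hypothetical helper, not in my current code: rotate the data-output
// connection and the preview to match the device orientation.
func updateVideoOrientation() {
    guard let connection = videoDataOutput.connection(with: .video),
          connection.isVideoOrientationSupported else { return }
    let newOrientation: AVCaptureVideoOrientation
    switch UIDevice.current.orientation {
    case .landscapeLeft: newOrientation = .landscapeRight // the two enums name landscape opposite ways
    case .landscapeRight: newOrientation = .landscapeLeft
    case .portraitUpsideDown: newOrientation = .portraitUpsideDown
    default: newOrientation = .portrait
    }
    connection.videoOrientation = newOrientation
    cameraView.videoPreviewLayer.connection?.videoOrientation = newOrientation
}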
Second question: when I use Vision, how do I need to set the orientation in the handler? I tried the exifOrientationFromDeviceOrientation() method from an Apple example, but in my case it is completely wrong.
It only works correctly if I set the orientation to .leftMirrored...
but why .leftMirrored, when I'm using the back camera as input? Every other setting gives me the wrong box position.
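Concretely, the only variant that puts the box in the right place is hard-coding the orientation when I create the handler (this is just my workaround, not something I understand):

// Works, but I don't understand why .leftMirrored is right for the back camera:
let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer,
                                                orientation: .leftMirrored,
                                                options: [:])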
var faceLayersArray: [CAShapeLayer] = []

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    self.sessionQueue.async {
        let faceRequest = VNDetectFaceLandmarksRequest { req, err in
            DispatchQueue.main.async {
                // Remove the boxes drawn for the previous frame.
                self.faceLayersArray.forEach { layer in
                    layer.removeFromSuperlayer()
                }
                if let results = req.results as? [VNFaceObservation], !results.isEmpty {
                    self.handleFace(observations: results)
                }
            }
        }
        let exifOrientation = self.exifOrientationFromDeviceOrientation()
        guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
            return
        }
        let imageRequestHandler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, orientation: exifOrientation, options: [:])
        do {
            try imageRequestHandler.perform([faceRequest])
        } catch {
            print("Error in image request handler: \(error)")
        }
    }
}
func handleFace(observations: [VNFaceObservation]) {
    for observation in observations {
        let boundBoxFace = observation.boundingBox
        let faceRectConverted = self.cameraView.videoPreviewLayer.layerRectConverted(fromMetadataOutputRect: boundBoxFace)
        let faceRectPath = CGPath(rect: faceRectConverted, transform: nil)
        let faceLayer = CAShapeLayer()
        faceLayer.path = faceRectPath
        faceLayer.fillColor = UIColor.clear.cgColor
        faceLayer.strokeColor = UIColor.yellow.cgColor
        faceLayersArray.append(faceLayer)
        self.cameraView.videoPreviewLayer.addSublayer(faceLayer)
    }
}
}
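One more thing I'm unsure about: Vision's boundingBox is normalized with the origin in the lower left, while layerRectConverted(fromMetadataOutputRect:) expects metadata-output coordinates, whose origin is in the top left. Could the missing step be a vertical flip like this? (Untested sketch.)

// Possible fix I haven't verified: flip the Vision rect vertically
// before handing it to the preview layer's converter.
let bb = observation.boundingBox
let metadataRect = CGRect(x: bb.minX,
                          y: 1 - bb.maxY,
                          width: bb.width,
                          height: bb.height)
let faceRectConverted = cameraView.videoPreviewLayer
    .layerRectConverted(fromMetadataOutputRect: metadataRect)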
// from apple
public func exifOrientationFromDeviceOrientation() -> CGImagePropertyOrientation {
    let curDeviceOrientation = UIDevice.current.orientation
    let exifOrientation: CGImagePropertyOrientation
    switch curDeviceOrientation {
    case UIDeviceOrientation.portraitUpsideDown: // Device oriented vertically, home button on the top
        exifOrientation = .left
    case UIDeviceOrientation.landscapeLeft: // Device oriented horizontally, home button on the right
        exifOrientation = .upMirrored
    case UIDeviceOrientation.landscapeRight: // Device oriented horizontally, home button on the left
        exifOrientation = .down
    case UIDeviceOrientation.portrait: // Device oriented vertically, home button on the bottom
        exifOrientation = .up
    default:
        exifOrientation = .up
    }
    return exifOrientation
}