How to get camera calibration data from the TrueDepth camera on iOS?

Goal: To obtain depth data and calibration data from the TrueDepth camera for computer vision tasks.

I am confused because Apple's documentation says, for example:

To use depth data for computer vision tasks, use the data in the cameraCalibrationData property to rectify the depth data.

I tried that and got nil. Then, looking through Stack Overflow, I read:

cameraCalibrationData is always nil in photo, you have to get it from photo.depthData. As long as you're requesting depth data, you'll get the calibration data.
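
If I understand that correctly, the calibration should be read off the depth data object inside the capture delegate, roughly like this:

if let calibration = photo.depthData?.cameraCalibrationData {
    print(calibration.intrinsicMatrix) // calibration rides along with the depth data, not the photo
}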

So when I tried print(photo.depthData) to obtain the depth and calibration data, my output was:

Optional(hdis 640x480 (high/abs) 
calibration:
{intrinsicMatrix: [2735.35 0.00 2017.75 | 0.00 2735.35 1518.51 | 0.00 0.00 1.00], 
extrinsicMatrix: [1.00 0.00 0.00 0.00 | 0.00 1.00 0.00 0.00 | 0.00 0.00 1.00 0.00] pixelSize:0.001 mm, 
distortionCenter:{2017.75,1518.51}, 
ref:{4032x3024}})

^ But where is the depth data?

Below is my entire code:

Note: I'm new to Xcode and I'm used to coding in Python for computer vision tasks, so I apologize in advance for the messy code.

import AVFoundation
import UIKit
import Photos

class ViewController: UIViewController {

    var session: AVCaptureSession?
    let output = AVCapturePhotoOutput()
    var previewLayer = AVCaptureVideoPreviewLayer()
    
    // MARK: - Permission check
    private func checkCameraPermissions() {
        switch AVCaptureDevice.authorizationStatus(for: .video) {
        case .notDetermined:
            AVCaptureDevice.requestAccess(for: .video) { [weak self] granted in
                guard granted else { return }
                DispatchQueue.main.async { self?.setUpCamera() }
            }
        case .restricted:
            break
        case .denied:
            break
        case .authorized:
            setUpCamera()
        @unknown default:
            break
        }
    }
    
    
    // MARK: - camera SETUP
    private func setUpCamera() {
        let session = AVCaptureSession()
        // The TrueDepth camera is the front-facing depth device.
        if let captureDevice = AVCaptureDevice.default(.builtInTrueDepthCamera, for: .video, position: .front) {
            do {
                let input = try AVCaptureDeviceInput(device: captureDevice)

                session.beginConfiguration()
                session.sessionPreset = .photo
                if session.canAddInput(input) {
                    session.addInput(input)
                }
                if session.canAddOutput(output) {
                    session.addOutput(output)
                }
                // Depth delivery must be enabled on the output (after the session
                // is configured), or requesting depth per photo will crash.
                if output.isDepthDataDeliverySupported {
                    output.isDepthDataDeliveryEnabled = true
                }
                session.commitConfiguration()

                previewLayer.videoGravity = .resizeAspectFill
                previewLayer.session = session

                session.startRunning()
                self.session = session
            }
            catch {
                print(error)
            }
        }
    }
    
    
    //MARK: - UI Button
    private let shutterButton: UIButton = {
        let button = UIButton(frame: CGRect(x: 0, y: 0, width: 100, height: 100))
        button.layer.cornerRadius = 50
        button.layer.borderWidth = 10
        button.layer.borderColor = UIColor.white.cgColor
        return button
    }()
    
    //MARK: - Video Preview Setup
    override func viewDidLoad() {
        super.viewDidLoad()
        view.backgroundColor = .black
        view.layer.insertSublayer(previewLayer, at: 0)
        view.addSubview(shutterButton)
        checkCameraPermissions()
        shutterButton.addTarget(self, action: #selector(didTapTakePhoto), for: .touchUpInside)
    }
    
    //MARK: - Layout
    override func viewDidLayoutSubviews() {
        super.viewDidLayoutSubviews()
        previewLayer.frame = view.bounds
        shutterButton.center = CGPoint(x: view.frame.size.width/2, y: view.frame.size.height - 100)
    }
    
    //MARK: - Running and Stopping the Session
    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        session?.startRunning()
    }
    
    //MARK: - Running and Stopping the Session
    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        session?.stopRunning()
    }
    
    //MARK: - taking a photo
    @objc private func didTapTakePhoto() {
        let photoSettings = AVCapturePhotoSettings()
        // Only request depth if the output has it enabled; otherwise
        // capturePhoto(with:delegate:) raises an exception.
        photoSettings.isDepthDataDeliveryEnabled = output.isDepthDataDeliveryEnabled
        photoSettings.isDepthDataFiltered = true
        output.capturePhoto(with: photoSettings, delegate: self)
    }
}
extension ViewController: AVCapturePhotoCaptureDelegate {

    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        
        guard let data = photo.fileDataRepresentation() else {
            return
        }
        print(data)
        // photo.depthData is an Optional<AVDepthData>; printing it shows the
        // calibration summary, but the depth map itself is in depthDataMap.
        print(photo.depthData as Any)

        let image = UIImage(data: data)
        session?.stopRunning()

        // ADDING the IMAGE onto the UI
        let imageView = UIImageView(image: image)
        imageView.contentMode = .scaleAspectFill
        imageView.frame = view.bounds
        view.addSubview(imageView)
        
        
        // saving photo to library
        PHPhotoLibrary.requestAuthorization { status in
            guard status == .authorized else { return }
            
            PHPhotoLibrary.shared().performChanges({
                let creationRequest = PHAssetCreationRequest.forAsset()
                creationRequest.addResource(with: .photo, data: data, options: nil)
            }, completionHandler: { _, error in
                if let error = error {
                    print(error)
                }
            })
        }
    }
}

1 Answer

Answered by Andrei G. (best answer):

What you need for Vision is a CVPixelBuffer (among other options), which you get from photo.depthData.depthDataMap

guard let depthData = photo.depthData else { return } // photo.depthData is optional
let depthBuffer = depthData.depthDataMap // CVPixelBuffer (orientation needs to be handled separately)

if depthData.depthDataQuality == .low {
    print("Low depth quality...")
}

if depthData.depthDataAccuracy == .relative {
    print("Depth data not accurate (relative)")
}
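
Since the question also asks for the calibration data: it travels on the same AVDepthData object (it is what your print statement already showed). A minimal sketch using the documented AVCameraCalibrationData properties; note simd matrices are column-major, so the principal point sits in the third column:

if let calibration = depthData.cameraCalibrationData {
    let intrinsics = calibration.intrinsicMatrix                 // 3x3 camera matrix, in pixels
    let ref = calibration.intrinsicMatrixReferenceDimensions     // e.g. 4032x3024
    print("fx: \(intrinsics.columns.0.x), fy: \(intrinsics.columns.1.y)")
    print("principal point: (\(intrinsics.columns.2.x), \(intrinsics.columns.2.y))")
    print("reference dimensions: \(ref)")
}

And to read metric depth values out of the buffer for a vision task, convert to Float32 depth first (a sketch; values are in meters, NaN where the sensor measured nothing):

let metricDepth = depthData.converting(toDepthDataType: kCVPixelFormatType_DepthFloat32)
let map = metricDepth.depthDataMap
CVPixelBufferLockBaseAddress(map, .readOnly)
if let base = CVPixelBufferGetBaseAddress(map) {
    let firstRow = base.assumingMemoryBound(to: Float32.self)
    print("depth at (0, 0): \(firstRow[0]) m")
}
CVPixelBufferUnlockBaseAddress(map, .readOnly)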
    

To get a UIImage from a CVPixelBuffer, see this answer.
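
For a quick on-screen sanity check, a minimal sketch via Core Image (my assumption, not part of the linked answer; raw disparity values usually need normalizing before they display as a sensible grayscale image):

import CoreImage
import UIKit

let ciImage = CIImage(cvPixelBuffer: depthBuffer)
let context = CIContext()
if let cgImage = context.createCGImage(ciImage, from: ciImage.extent) {
    let depthImage = UIImage(cgImage: cgImage) // display, e.g., in a UIImageView
}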