5

I am not getting depthData when I take a photo on an iPhone X the first time I take a picture.

Camera permissions are valid

If I run the code on the iPhone X, take a picture, no depth data, if I tap again, I get depth data. If I add a delay before I call capturePhoto then I do get depthData

If I run the code on the iPhone XS, take a picture, I have depth data straight away

Is there something I'm missing with my code?

import UIKit
import AVFoundation
import os.log

extension OSLog {
    /// The subsystem for the app
    public static var appSubsystem = "com.my.app"
    /// Image processing category
    static let imageService = OSLog(subsystem: OSLog.appSubsystem, category: "Image")
}

/**
 Use this as a logging system, it's compiled out for release builds for now.
 */
func DLog(_ string: String, subsystem: OSLog, type: OSLogType = .debug) {
    #if DEBUG
    os_log("%{PUBLIC}@", log: subsystem, type: type, string)
    #endif
}

class ViewController: UIViewController {

    var photoProcessingQueue = DispatchQueue(label: "ProcessingQueue", attributes: [], autoreleaseFrequency: .workItem)

    var photoCaptureOutput: AVCapturePhotoOutput!
    var photoSession: AVCaptureSession!
    var initalized: Bool = false

    func setupCaptureSession() {

        defer {
            photoSession.commitConfiguration()
        }

        photoCaptureOutput = AVCapturePhotoOutput()
        photoSession = AVCaptureSession()

        DLog("setupCaptureSession \(String(describing: Thread.current))", subsystem: .imageService)

        photoSession.beginConfiguration()
        photoSession.sessionPreset = .photo

        do {

            guard let defaultCaptureDevice = AVCaptureDevice.default(.builtInDualCamera, for: .video, position: .back) else {
                DLog("can't find camera", subsystem: .imageService, type: .error)
                return
            }

            let videoDeviceInput = try AVCaptureDeviceInput(device: defaultCaptureDevice)

            if photoSession.canAddInput(videoDeviceInput) {
                photoSession.addInput(videoDeviceInput)
            } else {
                DLog("can't add camera", subsystem: .imageService, type: .error)
                return
            }

            if photoSession.canAddOutput(photoCaptureOutput) {
                photoSession.addOutput(photoCaptureOutput)

                photoCaptureOutput.isHighResolutionCaptureEnabled = true
                photoCaptureOutput.isDepthDataDeliveryEnabled = photoCaptureOutput.isDepthDataDeliverySupported
                photoCaptureOutput.maxPhotoQualityPrioritization = .quality
            } else {
                DLog("Could not add photo output to the session", subsystem: .imageService, type: .error)
            }

        } catch {

            DLog("general camera error: \(error)", subsystem: .imageService, type: .error)
        }

    }

    @IBAction func takePhoto(sender: UIButton) {
        photoProcessingQueue.async {

            if !self.initalized {
                self.setupCaptureSession()
                self.initalized = true
            }

            DLog("Photo session running: \(self.photoSession.isRunning) \(String(describing: Thread.current))", subsystem: .imageService)
            self.photoSession.startRunning()
            DLog("Photo session running: \(self.photoSession.isRunning) \(String(describing: Thread.current))", subsystem: .imageService)

            let photoSettings: AVCapturePhotoSettings
            if self.photoCaptureOutput.availablePhotoCodecTypes.contains(.jpeg) {
                photoSettings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
            } else {
                photoSettings = AVCapturePhotoSettings()
            }

            photoSettings.flashMode = .auto
            photoSettings.isDepthDataDeliveryEnabled = self.photoCaptureOutput.isDepthDataDeliverySupported
            photoSettings.photoQualityPrioritization = .quality

            // BUG iPhone X returns no depthData without a delay here...
            //Thread.sleep(forTimeInterval: 0.1)

            // Actually capture photo
            self.photoCaptureOutput.capturePhoto(with: photoSettings, delegate: self)
        }
    }
}

extension ViewController : AVCapturePhotoCaptureDelegate {
    func photoOutput(_ output: AVCapturePhotoOutput, didFinishProcessingPhoto photo: AVCapturePhoto, error: Error?) {
        DLog("photo.depthData?.depthDataMap: \(photo.depthData?.depthDataMap)", subsystem: .imageService)
    }
}
Daniel Storm
  • 18,301
  • 9
  • 84
  • 152
Chris
  • 2,739
  • 4
  • 29
  • 57
  • Hi, Apple's sample code calls capturePhoto in the background queue: https://developer.apple.com/documentation/avfoundation/cameras_and_media_capture/avcam_building_a_camera_app – Chris Feb 14 '20 at 17:28
  • As you say, adding a 0.1 second delay before I take the photo and things work fine. I just don't want to add a sleep – Chris Feb 14 '20 at 17:29
  • 1
    OK you're totally right about that, sorry. I blew it. — But their code still does not call `startRunning` and `capturePhoto` in the same breath the way yours does. You don't have to add a sleep, you can just use `asyncAfter`. But that is still rather risky; you are just guessing, in effect, how long to delay. – matt Feb 14 '20 at 17:51
  • Thanks, the thing that bothers me is why does it work on the XS and not the X? – Chris Feb 14 '20 at 18:12
  • 1
    Well, they have different processors, threading might work differently, and so forth. Lots of little details differ between models. – matt Feb 14 '20 at 18:19
  • If I move the setup into viewDidLoad I still don’t get depthData on the first try, it’s baffling me – Chris Feb 14 '20 at 18:24

0 Answers0