
I made a live translation app that identifies an object through the user's camera and translates its name. It works just fine on my iPhone 6s and doesn't crash in any of the simulators, but when I run it on an iPhone 6, it crashes as soon as I try to segue to the camera feed. Apple says it also crashes on the iPad.

Do certain devices just not support the Vision API, or is something wrong with my code?

import UIKit
import AVKit
import Vision

var lang = ""
var lang2 = ""


class ViewController: UIViewController, AVCaptureVideoDataOutputSampleBufferDelegate, AVCapturePhotoCaptureDelegate {



    @IBAction func screenshotB(_ sender: Any) {
        // screenshot camera screen view
    }
    @IBOutlet weak var screenshotBOutlet: UIButton!
    @IBOutlet weak var swirlyGuy: UIActivityIndicatorView!
    @IBOutlet weak var title1: UILabel!
    @IBOutlet weak var settingsButtonOutlet: UIButton!
    @IBOutlet weak var launchScreen: UIViewX!
    @IBOutlet weak var launchScreenLogo: UIImageView!

    func stopSwirlyGuy(){
        swirlyGuy.stopAnimating()
    }
    let identifierLabel: UILabel = {
        let label = UILabel()
        label.backgroundColor = UIColor(red: 0, green: 0, blue:0, alpha: 0.4)
        label.textColor = .white
        label.textAlignment = .center
        label.translatesAutoresizingMaskIntoConstraints = false
        return label
    }()

    @IBAction func prepareForUnwind(segue: UIStoryboardSegue) {

    }

    override func viewDidLoad() {
        super.viewDidLoad()
        launchScreen.alpha = 1
        launchScreenLogo.alpha = 1
        swirlyGuy.startAnimating()


        // start up the camera

        let captureSession = AVCaptureSession()
        captureSession.sessionPreset = .hd4K3840x2160

        guard let captureDevice = AVCaptureDevice.default(for: .video) else { return }
        guard let input = try? AVCaptureDeviceInput(device: captureDevice) else { return }
        captureSession.addInput(input)

        captureSession.startRunning()

        let previewLayer = AVCaptureVideoPreviewLayer(session: captureSession)
        view.layer.addSublayer(previewLayer)
        previewLayer.frame = view.frame

        let dataOutput = AVCaptureVideoDataOutput()
        dataOutput.setSampleBufferDelegate(self, queue: DispatchQueue(label: "videoQueue"))
        captureSession.addOutput(dataOutput)



        setupIdentifierConfidenceLabel()
        setupSettingsButton()
        setupTitle()
        setupSwirlyGuy()
        setupScreenshot()
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        UIView.animate(withDuration: 1.5) {
            self.launchScreen.alpha = 0
            self.launchScreenLogo.alpha = 0
        }

    }
    fileprivate func setupSettingsButton() {

        view.addSubview(settingsButtonOutlet)
    }

    fileprivate func setupScreenshot() {

        view.addSubview(screenshotBOutlet)
    }

    fileprivate func setupSwirlyGuy() {

        view.addSubview(swirlyGuy)
    }

    fileprivate func setupTitle() {

        view.addSubview(title1)
    }

    fileprivate func setupIdentifierConfidenceLabel() {
        view.addSubview(identifierLabel)
        identifierLabel.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
        identifierLabel.leftAnchor.constraint(equalTo: view.leftAnchor).isActive = true
        identifierLabel.rightAnchor.constraint(equalTo: view.rightAnchor).isActive = true
        identifierLabel.heightAnchor.constraint(equalToConstant: 100).isActive = true
        identifierLabel.numberOfLines = 0
    }





    func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
        //        print("Camera was able to capture a frame:", Date())

        guard let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }

      // model
        guard let model = try? VNCoreMLModel(for: Resnet50().model) else { return }
        let request = VNCoreMLRequest(model: model) { (finishedReq, err) in

            //perhaps check the err

            //            print(finishedReq.results)

            guard let results = finishedReq.results as? [VNClassificationObservation] else { return }

            guard let firstObservation = results.first else { return }

            print(firstObservation.identifier, firstObservation.confidence)

            let x = firstObservation.confidence
            let y = (x * 10000).rounded() / 10000
            let z = firstObservation.identifier

            let s = self.translateSpanish(object1: firstObservation.identifier)
            let f = self.translateFrench(object1: firstObservation.identifier)

            // var lang = ""
            // var lang2 = ""

            if language == "English" {
                lang = z
            } else if language == "Spanish" {
                lang = s
            } else {
                lang = f
            }

            if language2 == "Spanish" {
                lang2 = s
            } else if language2 == "English" {
                lang2 = z
            } else {
                lang2 = f
            }

            DispatchQueue.main.async {
                self.identifierLabel.text = "\(lang) = \(lang2) \n \(y * 100)% accuracy"
                self.stopSwirlyGuy()
            }
        }

        try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:]).perform([request])
    }

    // Translation functions omitted for brevity
}

This is the code for the view controller that segues into the main screen where the camera feed and Vision processing take place.

import UIKit

class FirstLaunchViewController: UIViewController {

    @IBOutlet weak var title1: UILabelX!
    @IBOutlet weak var logo1: UIImageView!
    @IBOutlet weak var description1: UILabel!
    @IBOutlet weak var buttonOutlet: UIButtonX!
    @IBOutlet weak var initialBackground: UIViewX!
    @IBOutlet weak var initialLogo: UIImageView!

    @IBAction func toVC(_ sender: Any) {
        UserDefaults.standard.set(false, forKey: "name")
        performSegue(withIdentifier: "toMain", sender: self)
    }

    override func viewDidLoad() {
        super.viewDidLoad()
        initialLogo.alpha = 1
        initialBackground.alpha = 1
        title1.alpha = 0
        logo1.alpha = 0
        description1.alpha = 0
        buttonOutlet.alpha = 0
    }

    override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)

        UIView.animate(withDuration: 1.5, animations: {
            self.initialLogo.alpha = 0
            self.initialBackground.alpha = 0
        }) { _ in
            self.initialBackgroundGone()
        }
    }

    func initialBackgroundGone() {
        UIView.animate(withDuration: 1.5, animations: {
            self.title1.alpha = 1
        }) { _ in
            self.showBackgroundAgain()
        }
    }

    func showBackgroundAgain() {
        UIView.animate(withDuration: 1.3, animations: {
            self.logo1.alpha = 1
        }) { _ in
            self.showTitle()
        }
    }

    func showTitle() {
        UIView.animate(withDuration: 1.5, animations: {
            self.description1.alpha = 1
        }) { _ in
            self.showEverythingElse()
        }
    }

    func showEverythingElse() {
        UIView.animate(withDuration: 3.5) {
            self.buttonOutlet.alpha = 1
        }
    }
}

  • (1) You didn't mention it, but I assume you are targeting iOS 11 only, right? (2) Do you have any more details about the crash reason? Are you sure it deals with `Vision` and not something with `AVKit`? –  Sep 21 '17 at 17:12
  • Yes @dfd, I am targeting iOS 11 only, and I assumed it had to do with the new API because it works fine on the iPhone 6s and 7 but not the iPhone 6 or iPad. If the code works fine on newer devices, I don't know what could make it not work on older devices. – Codemaster99 Sep 21 '17 at 17:16
  • 2
    Which version of the iPad are you testing on? Are you receiving any low memory notifications like applicationDidReceiveMemoryWarning on the App Delegate? I suspect you're probably just crashing because of low memory. – Chris Allwein Sep 21 '17 at 19:04
  • I'm headed where @ChrisAllwein is. It's been a few months since I used a spare iPad Mini 4, upgraded to iOS 11, to play with `Vision`. (No problems.) But I'm trying to rule things out - what all have you ruled out? Granted, I haven't used `Vision` on my current devices (iPhone SE and, with the GM releases, iPad Mini). Narrowing down the issue seems like the right thing at this point. –  Sep 21 '17 at 20:53
  • I don't have an iPad to test it on. I tested it on an iPhone 6s and 7 and it ran fine. Apple rejected the app during the review process because they said it crashed when they segued to the main camera view. I've run iPads in the simulator before and it runs fine, but obviously it doesn't display a video feed in the main camera view since the simulator doesn't have a camera. So that's why I believe it has to do with Vision. Since it works fine on newer devices. @ChrisAllwein – Codemaster99 Sep 21 '17 at 20:59
  • ^^^^^^^^^^^^^^^ @dfd – Codemaster99 Sep 21 '17 at 21:02
  • 2
    One more question: Since the simulator doesn't have a camera, how does it run fine? Your code doesn't (yet) account for something without a device. One last idea - delete the app from your physical devices, then run it. Does it work? I actually have three iPads I can test it on - for iOS 11 it'll take another day sorry - but it does sound (to me) like you've been analyzing the issue as best you can. Set up something with your full source code and I'll try it over the next 1-2 days. –  Sep 21 '17 at 21:47
  • @dfd When I run it in the simulator, the screen where the camera feed would be is just white. It doesn't crash or anything. My friend is bringing over her iPad tomorrow, so I'll test it then and examine the crash log. I ran the app again on my phone today and looked at the CPU usage - it seemed pretty high. I'm thinking maybe on older devices like the iPhone 6 and older iPads it's too much and it just crashes? – Codemaster99 Sep 22 '17 at 04:05
  • @dfd The crash log from Apple says the exception type is "EXC_CRASH (SIGABRT)", which Apple's documentation says is caused by an "uncaught Objective-C exception", and that "App Extensions will be terminated with this exception type if they take too much time to initialize." So can these devices just not handle the processing? – Codemaster99 Sep 22 '17 at 04:08

1 Answer

This is a lot of code, but I think your issue comes from the video preset you are using: the iPhone 6 doesn't support 4K video recording.

When setting the session preset you should test that it is supported by all the targeted devices:

if captureSession.canSetSessionPreset(.hd4K3840x2160) {
    captureSession.sessionPreset = .hd4K3840x2160
} else {
    captureSession.sessionPreset = .high // or any other preset that suits your needs
}
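
This also matches the crash log you quoted in the comments: setting sessionPreset to a value the device doesn't support raises an NSInvalidArgumentException, and an uncaught Objective-C exception terminates the app with EXC_CRASH (SIGABRT).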
Sparga
  • I forgot to update this post, but yes, this was the tiny detail I had overlooked. I didn't know about canSetSessionPreset, though; I manually checked each iPhone model to determine whether it could support 4K. This is much simpler. – Codemaster99 Oct 04 '17 at 04:37
  • The one thing I haven't been able to figure out is how to get 60fps video. When you open an AR app it displays the video feed at 60fps, but I haven't been able to figure out how to set that up. – Codemaster99 Oct 04 '17 at 04:38
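
Regarding the 60fps question above: here is a minimal sketch of one way to do it with AVFoundation. It is illustrative only (the function name is mine), and it assumes the device actually exposes a format whose frame-rate ranges reach 60 fps:

import AVFoundation

// Switch the capture device to a format that supports 60 fps, if one exists.
// Note: setting activeFormat directly overrides any session preset.
func configureFor60FPS(_ device: AVCaptureDevice) {
    // Look for a format whose supported frame-rate ranges reach 60 fps.
    guard let format = device.formats.first(where: { format in
        format.videoSupportedFrameRateRanges.contains { $0.maxFrameRate >= 60 }
    }) else { return } // this device has no 60 fps format

    do {
        try device.lockForConfiguration()
        device.activeFormat = format
        // Pin both min and max frame durations to 1/60 s.
        let duration = CMTime(value: 1, timescale: 60)
        device.activeVideoMinFrameDuration = duration
        device.activeVideoMaxFrameDuration = duration
        device.unlockForConfiguration()
    } catch {
        print("Could not lock device for configuration: \(error)")
    }
}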