1

I am making a QR scanner. My code works when all of it is written in one place inside the ViewController, but when I modularised it I stopped getting the callback inside AVCaptureMetadataOutputObjectsDelegate.

import Foundation
import UIKit
import AVFoundation

/// Wraps an `AVCaptureSession` together with its camera input, metadata
/// output and video preview layer so the capture pipeline can be assembled
/// outside of a view controller. Instances are normally constructed through
/// the nested `Builder` (`createSession` → `setSessionPreset` →
/// `attachInputDevice` → `addOutputToSessionForMetaData`).
class CameraSource : NSObject {

    private var session                     : AVCaptureSession?
    private var inputDevice             : AVCaptureDeviceInput?
    private var videoPreviewLayer   : AVCaptureVideoPreviewLayer?

    // Output that delivers scanned metadata (e.g. QR codes) to its delegate.
    private var captureMetadataOutput : AVCaptureMetadataOutput?

    /// Replaces any previously created metadata output with a fresh instance.
    func setCaptureMetadataOutput() {
        self.captureMetadataOutput = nil
        self.captureMetadataOutput = AVCaptureMetadataOutput()
    }

    /// Returns the current metadata output, or `nil` if
    /// `setCaptureMetadataOutput()` has not been called yet.
    func getCaptureMetadataOutput() -> AVCaptureMetadataOutput? {
        return self.captureMetadataOutput
    }

    /// Stores the camera input to be attached to the session.
    func setInputDevice(inputDevice : AVCaptureDeviceInput?) {
        self.inputDevice = inputDevice
    }

    /// Returns the stored camera input, if any.
    func getInputDevice() -> AVCaptureDeviceInput? {
        return self.inputDevice
    }

    /// Stores the capture session managed by this source.
    func setSession(session : AVCaptureSession?) {
        self.session = session
    }

    /// Returns the stored capture session, if any.
    func getSession() -> AVCaptureSession? {
        return self.session
    }

    /// Registers `delegate` for metadata callbacks on the main queue and
    /// restricts the output to the given object types (e.g. `[.qr]`).
    ///
    /// NOTE: `AVCaptureMetadataOutput` holds its delegate *weakly* — the
    /// caller must keep a strong reference to `delegate` (and to whatever
    /// owns this `CameraSource`) or the callback will never fire.
    /// Requires `setCaptureMetadataOutput()` to have been called first.
    func setMetadataObjects(metaObjects : [AVMetadataObject.ObjectType], delegate : AVCaptureMetadataOutputObjectsDelegate) {
        assert(self.captureMetadataOutput != nil)
        self.captureMetadataOutput!.setMetadataObjectsDelegate(delegate, queue: DispatchQueue.main)
        self.captureMetadataOutput!.metadataObjectTypes = metaObjects
    }

    /// Creates the preview layer for the current session with the given
    /// gravity and orientation. Requires a session to exist.
    /// (Method name keeps the original "Viewo" spelling because external
    /// callers depend on it.)
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) {
        assert(session != nil)

        videoPreviewLayer                                                           = AVCaptureVideoPreviewLayer(session: session!)
        videoPreviewLayer!.videoGravity                                 = videoGravity
        videoPreviewLayer!.connection!.videoOrientation = orientation
    }

    /// Adds the preview layer as a sublayer of `imageView` and sizes it to
    /// the view's current bounds. Requires `initViewoPreviewLayer` first.
    /// NOTE: the frame is captured once; it is not updated on layout changes.
    func addVideoLayerToImageView(imageView : UIImageView) {
        assert(self.videoPreviewLayer != nil)

        imageView.layer.addSublayer(self.videoPreviewLayer!)
        self.videoPreviewLayer!.frame = imageView.bounds
    }

    /// Starts the capture session. Requires a session to exist.
    /// NOTE(review): `startRunning()` blocks; Apple recommends calling it
    /// off the main thread — confirm call sites.
    func startSession() {
        assert(session != nil)
        self.session!.startRunning()
    }


    /*==========================================================================
    STATIC FUNCTIONS
    ==========================================================================*/

    /// Returns the built-in wide-angle back camera.
    /// Force-unwraps: crashes on devices without a back camera (e.g. some
    /// simulators).
    static func getBackCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .back)!
    }

    /// Returns the built-in wide-angle front camera (force-unwrapped, see above).
    static func getFrontCamera() -> AVCaptureDevice {
        return AVCaptureDevice.default(.builtInWideAngleCamera, for: AVMediaType.video, position: .front)!
    }

    /// Reports whether at least one video capture device is present,
    /// using the discovery-session API on iOS 10+ and the deprecated
    /// `devices(for:)` API on earlier systems.
    static func isCameraAvailable() -> Bool {
        if #available(iOS 10.0, *) {
            let count : Int = AVCaptureDevice.DiscoverySession(deviceTypes: [.builtInWideAngleCamera],
                                                                                                                 mediaType: AVMediaType.video,
                                                                                                                 position: .unspecified).devices.count
            if count > 0 { return true }
        }
        else {
            let count = AVCaptureDevice.devices(for: AVMediaType.video).count
            if count > 0 { return true }
        }
        return false
    }


    /*==========================================================================
    CAMERA BUILDER CLASS
    ==========================================================================*/

    /// Step-by-step assembler for a `CameraSource`. The intended order is:
    /// `createSession()` → `setSessionPreset(_:)` → `attachInputDevice(_:)`
    /// → `addOutputToSessionForMetaData()`, which returns the finished source.
    class Builder {

        var cameraSource : CameraSource

        init() {
            cameraSource = CameraSource()
        }

        /// Creates a fresh `AVCaptureSession`, discarding any existing one.
        func createSession() -> Builder {
            if (cameraSource.getSession() != nil) {
                cameraSource.setSession(session: nil)
            }
            cameraSource.setSession(session: AVCaptureSession())
            return self
        }

        /// Applies a quality preset (e.g. `.photo`) to the session.
        /// Requires `createSession()` to have been called.
        func setSessionPreset(preset : AVCaptureSession.Preset) -> Builder {
            assert(cameraSource.getSession() != nil)

            cameraSource.getSession()!.sessionPreset = preset
            return self
        }

        /// Wraps `camera` in an `AVCaptureDeviceInput` and adds it to the
        /// session.
        /// - Throws: `AppErrorCode.cameraError` if the input cannot be
        ///   created or the session does not exist.
        func attachInputDevice(camera : AVCaptureDevice) throws -> Builder {

            try self.prepareInputDevice(camera: camera)
            try self.addInputToSession()

            assert(cameraSource.inputDevice != nil)
            return self
        }

        /// Final build step: creates the metadata output, attaches it to the
        /// session and returns the assembled `CameraSource`.
        /// - Throws: `AppErrorCode.cameraError` if the session rejects the output.
        func addOutputToSessionForMetaData() throws -> CameraSource {
            cameraSource.setCaptureMetadataOutput()

            assert(cameraSource.getSession() != nil && cameraSource.getCaptureMetadataOutput() != nil)

            if !cameraSource.getSession()!.canAddOutput(cameraSource.getCaptureMetadataOutput()!) {
                throw AppErrorCode.cameraError("Unable to attach output to camera session")
            }
            cameraSource.getSession()!.addOutput(cameraSource.getCaptureMetadataOutput()!)

            return self.cameraSource
        }

        /*==========================================================================
        BUILDER PRIVATE FUNCTIONS
        ==========================================================================*/

        /// Creates the `AVCaptureDeviceInput` for `camera` and stores it on
        /// the camera source, translating any failure into `AppErrorCode`.
        private func prepareInputDevice(camera : AVCaptureDevice) throws {
            do {
                let inputDevice = try AVCaptureDeviceInput(device: camera)
                cameraSource.setInputDevice(inputDevice: inputDevice)

            } catch let error as NSError {
                print(error.localizedDescription)
                throw AppErrorCode.cameraError("Unable to attach input to camera session")
            }
        }

        /// Adds the stored input to the session; throws if no session exists.
        private func addInputToSession() throws {
            if(cameraSource.getSession() == nil) {
                throw AppErrorCode.cameraError("Unable to create camera session")
            }

            assert(cameraSource.getInputDevice() != nil && cameraSource.getSession()!.canAddInput(cameraSource.getInputDevice()!))

            cameraSource.getSession()!.addInput(cameraSource.getInputDevice()!)
        }

    }


}

My QR scanner Code looks like

import UIKit
import Foundation
import AVFoundation

/// Receives the decoded QR payload from `QRScanner`.
///
/// Class-bound (`AnyObject`) so conforming view controllers can be referenced
/// weakly, preventing a retain cycle (VC owns scanner, scanner points back at VC).
protocol QRScannerDelegate : AnyObject {
    func scannedData(_ scannedString : String)
}

/// Facade over `CameraSource` that assembles an AVFoundation capture session
/// for QR scanning and forwards decoded strings to `delegate`.
///
/// IMPORTANT: callers must keep a strong reference to this object (e.g. a
/// view-controller property). `AVCaptureMetadataOutput` only holds its
/// metadata delegate weakly, so a temporary `QRScanner()` created inline is
/// deallocated before any metadata callback can fire.
class QRScanner : NSObject {

    private var cameraSource : CameraSource?

    // Weak: the view controller that owns this scanner is also its delegate.
    weak var delegate : QRScannerDelegate?

    /// Builds the capture pipeline (back camera, `.photo` preset) and
    /// registers `self` for QR metadata callbacks on the main queue.
    /// - Parameter delegate: the receiver of scanned strings.
    /// - Returns: `self`, to allow call chaining.
    /// - Throws: `AppErrorCode.cameraError` if any build step fails.
    func prepareCamera (delegate : QRScannerDelegate) throws -> QRScanner {
        do {
            self.delegate = delegate
            self.cameraSource = try CameraSource
                .Builder()
                .createSession()
                .setSessionPreset(preset: .photo)
                .attachInputDevice(camera: CameraSource.getBackCamera())
                .addOutputToSessionForMetaData()

            // `self` conforms to AVCaptureMetadataOutputObjectsDelegate via the
            // extension below — no explicit upcast needed.
            self.cameraSource!.setMetadataObjects(metaObjects: [.qr], delegate: self)

        } catch {
            // Catch every error (not just NSError), reset state, and rethrow
            // as the app's own error type.
            print(error.localizedDescription)
            self.cameraSource = nil
            throw AppErrorCode.cameraError("Unable to process camera with one or more issue")
        }

        return self
    }

    /// Creates the preview layer on the underlying camera source.
    /// Requires `prepareCamera(delegate:)` to have succeeded.
    func initViewoPreviewLayer(videoGravity : AVLayerVideoGravity, orientation : AVCaptureVideoOrientation) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.initViewoPreviewLayer(videoGravity: videoGravity, orientation: orientation)
        return self
    }

    /// Attaches the preview layer to `imageView`.
    /// Requires `initViewoPreviewLayer(videoGravity:orientation:)` first.
    func addVideoLayerToImageView(imageView : UIImageView) -> QRScanner{
        assert(cameraSource != nil)

        self.cameraSource!.addVideoLayerToImageView(imageView: imageView)
        return self
    }

    /// Starts the capture session; scanning callbacks begin after this.
    func startSession() {
        assert(cameraSource != nil)
        self.cameraSource!.startSession()
    }
}

// MARK: - AVCaptureMetadataOutputObjectsDelegate
extension QRScanner : AVCaptureMetadataOutputObjectsDelegate {

    /// Delivered on the main queue (see `CameraSource.setMetadataObjects`)
    /// whenever the session decodes metadata objects. Forwards the first QR
    /// payload to `delegate`, or "No Data" when the batch is empty.
    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {

        print("Delegate called")

        // Preserve the original contract: an empty batch reports "No Data".
        guard let firstObject = metadataObjects.first else {
            self.delegate?.scannedData("No Data")
            return
        }

        // Conditional cast instead of `as!`: the original force-cast would
        // crash if the session ever delivered a non-machine-readable object
        // (e.g. a face metadata object). Non-QR or value-less objects are
        // silently ignored, matching the original's behavior for those cases.
        guard let readableCode = firstObject as? AVMetadataMachineReadableCodeObject,
              readableCode.type == AVMetadataObject.ObjectType.qr,
              let scannedValue = readableCode.stringValue else {
            return
        }

        print("Scanner Getting data: \(scannedValue)")
        self.delegate?.scannedData(scannedValue)
    }
}

I have implemented QRScannerDelegate in my ViewController but I am not getting anything in there. Moreover, I am not getting the callback inside AVCaptureMetadataOutputObjectsDelegate either.

I tried passing the ViewController instance as AVCaptureMetadataOutputObjectsDelegate then I was getting callback with the scanned info.

So My question is why is this happening?

1) When I am passing a normal class as the AVCaptureMetadataOutputObjectsDelegate, I am not getting the callback. But,

2) When I am passing a UIViewController instance as the AVCaptureMetadataOutputObjectsDelegate, I am able to get the callback.

UPDATE

This is how I am calling prepareCamera from my View Controller

override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }
Thullo
  • 97
  • 15

1 Answers1

1

It's hard to say for sure without knowing how you called prepareCamera, as this is what triggers setMetadataObjectsDelegate. But it looks like you may not be keeping a strong reference to QRScanner in your ViewController (i.e. storing it as an instance variable). That would explain why the callback is hit when your ViewController is the AVCaptureMetadataOutputObjectsDelegate: the ViewController is still in memory, whereas a QRScanner created inline is deallocated immediately (AVCaptureMetadataOutput only holds its delegate weakly).

It's also worth noting that if the ViewController is your QRScannerDelegate you will want to define delegate as weak var delegate : QRScannerDelegate? to prevent a memory leak.

EDIT: Change

override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try QRScanner().prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

to

var qrScanner = QRScanner()
override func viewDidAppear(_ animated: Bool) {
        super.viewDidAppear(animated)
        do {

            try self.qrScanner.prepareCamera(delegate: self)
                    .initViewoPreviewLayer(videoGravity: .resizeAspectFill, orientation: .portrait)
                    .addVideoLayerToImageView(imageView: self.qrScannerImageView)
                    .startSession()

        } catch {

            print("Some Camera Error")

        }
        self.createOverlay()
    }

and change

protocol QRScannerDelegate {
    func scannedData(_ scannedString : String)
}

to

protocol QRScannerDelegate: class {
    func scannedData(_ scannedString : String)
}

To Allow a weak delegate

AVCaptureMetadataOutputObjectsDelegate is tough, but you can do some really cool stuff with it! So keep at it.

I pulled some QRScanner code I wrote a while ago and put it into a gist for you if you want to check it out. Its a bit more stripped down than what you have, but you may find it helpful. https://gist.github.com/aChase55/733ea89af1bfa80c65971d3bc691f0b2

Alex Chase
  • 960
  • 1
  • 7
  • 11
  • I have update my question with how I called prepare Camera. Can you give me some more insight? – Thullo May 02 '19 at 11:11
  • Yes, I am using the ViewController as the QRScannerDelegate. I tried keeping the variable as a weak reference, but Xcode was showing an error, so I had to remove it. – Thullo May 02 '19 at 11:12
  • Can you tell me why we have to add 'class' in protocol for referencing it as weak variable, how does it help? – Thullo May 03 '19 at 05:38
  • 1
    If you don't include `class` in your protocol, the protocol can be implemented by either a value type or reference type. By nature, it is not possible for value types to have a weak reference. Adding `class` to the protocol tells the compiler that only reference types will implement the protocol, making weak definitions legal. Since your view controller owns the delegate, and the delegate has a reference to the view controller, a memory leak will occur if the the delegates reference back to the view controller is not weak. – Alex Chase May 07 '19 at 19:43
  • Here's some more info on value vs reference types https://developer.apple.com/swift/blog/?id=10 – Alex Chase May 07 '19 at 19:43