
I am developing an application to detect rectangles in a live camera feed and highlight the detected rectangle. I set up the camera using AVFoundation and use the methods below to detect and highlight the rectangle.

var detector: CIDetector?

override func viewDidLoad() {
    super.viewDidLoad()

    detector = self.prepareRectangleDetector()
}

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) { // re check this method

    // Need to shimmy this through type-hell
    let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)

    // Force the type change - pass through opaque buffer
    let opaqueBuffer = Unmanaged<CVImageBuffer>.passUnretained(imageBuffer!).toOpaque()
    let pixelBuffer = Unmanaged<CVPixelBuffer>.fromOpaque(opaqueBuffer).takeUnretainedValue()

    let sourceImage = CIImage(CVPixelBuffer: pixelBuffer, options: nil)

    // Do some detection on the image
    self.performRectangleDetection(sourceImage)

    var outputImage = sourceImage

    // Crop the frame so its aspect ratio matches the display view's
    var drawFrame = outputImage.extent
    let imageAR = drawFrame.width / drawFrame.height
    let viewAR = videoDisplayViewBounds.width / videoDisplayViewBounds.height

    if imageAR > viewAR {
        drawFrame.origin.x += (drawFrame.width - drawFrame.height * viewAR) / 2.0
        drawFrame.size.width = drawFrame.height * viewAR
    } else {
        drawFrame.origin.y += (drawFrame.height - drawFrame.width / viewAR) / 2.0
        drawFrame.size.height = drawFrame.width / viewAR
    }

    //videoDisplayView is a GLKView which is used to display camera feed
    videoDisplayView.bindDrawable()
    if videoDisplayView.context != EAGLContext.currentContext() {
        EAGLContext.setCurrentContext(videoDisplayView.context)
    }

    // clear the GLKView to grey
    glClearColor(0.5, 0.5, 0.5, 1.0)
    glClear(GLbitfield(GL_COLOR_BUFFER_BIT))

    // set the blend mode to "source over" so that Core Image will use it
    glEnable(GLenum(GL_BLEND))
    glBlendFunc(GLenum(GL_ONE), GLenum(GL_ONE_MINUS_SRC_ALPHA))

    renderContext.drawImage(outputImage, inRect: videoDisplayViewBounds, fromRect: drawFrame)

    videoDisplayView.display()

}

func prepareRectangleDetector() -> CIDetector {

    let options: [String: AnyObject] = [CIDetectorAccuracy: CIDetectorAccuracyHigh]
    return CIDetector(ofType: CIDetectorTypeRectangle, context: nil, options: options)
}

func performRectangleDetection(image: CIImage) {

    let resultImage: CIImage? = nil

    if let detector = detector {

        // Get the detections
        let features = detector.featuresInImage(image, options: [CIDetectorAspectRatio: NSNumber(float: 1.43)])

        if features.count != 0 { // feature found

            for feature in features as! [CIRectangleFeature] {

                self.previewImageView.layer.sublayers = nil

                let line: CAShapeLayer = CAShapeLayer()
                line.frame = self.videoDisplayView.bounds
                let linePath: UIBezierPath = UIBezierPath()

                linePath.moveToPoint(feature.topLeft)
                linePath.addLineToPoint(feature.topRight)
                linePath.addLineToPoint(feature.bottomRight)
                linePath.addLineToPoint(feature.bottomLeft)
                linePath.addLineToPoint(feature.topLeft)
                linePath.closePath()

                line.lineWidth = 5.0
                line.path = linePath.CGPath
                line.fillColor = UIColor.clearColor().CGColor
                line.strokeColor = UIColor(netHex: 0x3399CC, alpha: 1.0).CGColor

                // videoDisplayParentView is the parent of videoDisplayView and they both have same bounds
                self.videoDisplayParentView.layer.addSublayer(line)
            }
        }
    }
}

I used CAShapeLayer and UIBezierPath to draw the rectangle. This is very, very slow; the path only becomes visible after minutes.

Can someone please help me figure out why it is slow, or let me know if I am doing something wrong here? Any help would be highly appreciated.

Or, if there is an easier way to do this, I would like to know that too.

Hanushka Suren
  • any infos on what you tried so far? – nayana Jun 10 '16 at 12:19
  • Sorry I didn't get what you need to know, exactly. I have done detecting rectangle in a live video feed and i am getting correct points. Now I need to draw a rectangle on my live video feed using those 4 points . I used UIBezierPath and CAShapeLayer to draw that rectangle. But it is very slow. – Hanushka Suren Jun 10 '16 at 12:32
  • TL;DR .. ok .. thats what I didnt know that its slow until you edited.. – nayana Jun 10 '16 at 12:41
  • Ah! sorry about that. Mistakenly I pressed post button before finish typing. – Hanushka Suren Jun 10 '16 at 12:47

1 Answer


If you get into the business of adding a sublayer to a GLKView, it will be slow. The GLKView here refreshes many times every second (since the drawing happens in the captureOutput:didOutputSampleBuffer: method), and creating and adding a new sublayer on every frame cannot keep up with that rate.

A better way is to draw the highlight using Core Image and composite it over resultImage.
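
A minimal sketch of that approach (assuming the Swift 2-era Core Image API used in the question; the helper name, overlay colour and alpha below are only illustrative) could look like this:

func drawHighlightOverlayForPoints(image: CIImage, topLeft: CGPoint, topRight: CGPoint,
                                   bottomLeft: CGPoint, bottomRight: CGPoint) -> CIImage {

    // Start from a solid, semi-transparent colour covering the whole frame
    var overlay = CIImage(color: CIColor(red: 0.2, green: 0.6, blue: 0.8, alpha: 0.5))
    overlay = overlay.imageByCroppingToRect(image.extent)

    // Warp that coloured rectangle onto the four detected corners
    overlay = overlay.imageByApplyingFilter("CIPerspectiveTransformWithExtent",
        withInputParameters: [
            "inputExtent": CIVector(CGRect: image.extent),
            "inputTopLeft": CIVector(CGPoint: topLeft),
            "inputTopRight": CIVector(CGPoint: topRight),
            "inputBottomLeft": CIVector(CGPoint: bottomLeft),
            "inputBottomRight": CIVector(CGPoint: bottomRight)
        ])

    // Composite the highlight over the camera frame
    return overlay.imageByCompositingOverImage(image)
}

performRectangleDetection could then return this composited CIImage instead of adding a CAShapeLayer, and captureOutput:didOutputSampleBuffer: could pass it to renderContext.drawImage(...) in place of sourceImage, so the highlight is rendered in the same draw pass as the video frame and no layers are created per frame.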

OutOnAWeekend