I am trying to make a face detector using CIDetector
that enables a button as long as a face is detected. The part I searched for and couldn't find is how to make the code trigger a function when it detects a face, and disable the button again when the face leaves the camera frame.
Here is the code that I have so far:
.h file:
#import <UIKit/UIKit.h>
/// View controller that shows a live camera preview and reveals a button
/// while a face is visible in the frame.
@interface ViewController : UIViewController
/// Button that should be enabled/visible only while a face is detected.
@property (weak, nonatomic) IBOutlet UIButton *actionButton;
//Update 2:
/// Container the camera preview layer is added to (storyboard outlet).
@property (weak, nonatomic) IBOutlet UIView *containerView;
/// Storyboard action for actionButton taps.
- (IBAction)actionButton:(id)sender;
@end
.m file:
#import "ViewController.h"
@import AVFoundation;
/// Private class extension: the controller acts as the metadata-output
/// delegate so it hears about detected faces.
/// NOTE: the _faceDetector / _ciContext ivars were removed — face detection
/// here is done by AVCaptureMetadataOutput, and CIDetector was never used.
@interface ViewController () <AVCaptureMetadataOutputObjectsDelegate> {
    // Layer that renders the live camera feed inside containerView.
    AVCaptureVideoPreviewLayer *_previewLayer;
    // Capture session driving both the preview and the metadata output.
    AVCaptureSession *_session;
}
@end
// FIX: the implementation was named SCViewController, which does not match
// the declared class (ViewController) — the file would not compile and the
// class extension would never attach.
@implementation ViewController

#pragma mark - Lifecycle

- (void)viewDidLoad
{
    [super viewDidLoad];
    // Build the capture pipeline: camera input -> session -> face metadata output.
    _session = [[AVCaptureSession alloc] init];
    [_session setSessionPreset:AVCaptureSessionPreset640x480];

    AVCaptureDevice *device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *error = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (input) {
        [_session addInput:input];
    } else {
        // No camera / permission denied — nothing more to set up.
        NSLog(@"error: %@", error);
        return;
    }

    AVCaptureMetadataOutput *output = [[AVCaptureMetadataOutput alloc] init];
    // The output must be attached to the session BEFORE restricting its
    // metadata types, otherwise availableMetadataObjectTypes is empty.
    [_session addOutput:output];
    // Only interested in faces.
    [output setMetadataObjectTypes:@[AVMetadataObjectTypeFace]];
    // Deliver callbacks on the main queue so we can touch UIKit directly.
    [output setMetadataObjectsDelegate:self queue:dispatch_get_main_queue()];

    // Show the preview inside containerView.
    _previewLayer = [AVCaptureVideoPreviewLayer layerWithSession:_session];
    _previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
    // FIX: size the layer from the view it is actually added to
    // (containerView), not from self.view — otherwise the preview is
    // mis-sized whenever the container is smaller than the screen.
    _previewLayer.bounds = self.containerView.bounds;
    _previewLayer.position = CGPointMake(CGRectGetMidX(self.containerView.bounds),
                                         CGRectGetMidY(self.containerView.bounds));
    [self.containerView.layer addSublayer:_previewLayer];

    // Hidden until the first face shows up.
    self.actionButton.hidden = YES;

    [_session startRunning];
}

#pragma mark - AVCaptureMetadataOutputObjectsDelegate

- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputMetadataObjects:(NSArray *)metadataObjects
       fromConnection:(AVCaptureConnection *)connection
{
    // Decide per callback whether any face is currently in frame, then set
    // the button's visibility from that. This both shows the button when a
    // face appears AND hides it again when the face leaves — the original
    // only ever un-hid it (and referenced a nonexistent `retakeButton`
    // property; the declared outlet is `actionButton`).
    BOOL faceVisible = NO;
    for (AVMetadataObject *metadataObject in metadataObjects) {
        if ([metadataObject.type isEqualToString:AVMetadataObjectTypeFace]) {
            faceVisible = YES;
            break;
        }
    }
    // NOTE(review): AVCaptureMetadataOutput delivers an empty array when the
    // tracked faces disappear, which is what drives the hide path — confirm
    // on device; if no callback arrives at all, pair this with a short
    // "last seen" timer instead.
    self.actionButton.hidden = !faceVisible;
}

#pragma mark - Actions

- (IBAction)actionButton:(id)sender {
    // Intentionally empty — wired from the storyboard.
}

@end