
I am trying to capture the face area.

Here is what I do: in didOutputMetadataObjects: I get the AVMetadataFaceObject, and then I process it in didOutputSampleBuffer.

didOutputMetadataObjects shows the marker correctly, taking the yaw and roll axes into account.

What would be the best way to get only the face area while still showing the face marker?

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputMetadataObjects:(NSArray *)metadataObjects fromConnection:(AVCaptureConnection *)connection
{
    for (AVMetadataObject *metaObject in metadataObjects) {
        if ([metaObject isKindOfClass:[AVMetadataFaceObject class]] && metaObject.type == AVMetadataObjectTypeFace) {
            AVMetadataFaceObject *adjustedMeta = (AVMetadataFaceObject *)[self.videoLayer transformedMetadataObjectForMetadataObject:metaObject];
            self.metaFaceObject = adjustedMeta;
            // Draw the face marker here
        }
    }
}

And here is the AVCaptureVideoDataOutputSampleBufferDelegate method:

- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer) {
        CFDictionaryRef attachments = CMCopyDictionaryOfAttachments(kCFAllocatorDefault, sampleBuffer, kCMAttachmentMode_ShouldPropagate);
        CIImage *ciImage = [[CIImage alloc] initWithCVPixelBuffer:pixelBuffer options:(__bridge NSDictionary<NSString *, id> *)attachments];
        if (attachments) {
            CFRelease(attachments);
        }
        ciImage = [ciImage imageByCroppingToRect:self.metaFaceObject.bounds];
        // This image is upside down, and it does not contain the face.
        UIImage *image = [UIImage imageWithCIImage:ciImage];
    }
}
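
I suspect part of the problem is a coordinate mismatch: self.metaFaceObject holds the transformed object, so its bounds are in the preview layer's coordinate space, while imageByCroppingToRect: expects a rect in the pixel buffer's own space (and Core Image uses a bottom-left origin). Here is a rough Swift sketch of the mapping I think is needed; metadataBounds and videoOutput are placeholders for the raw (untransformed) face bounds and my AVCaptureVideoDataOutput, and I have not verified the orientation/mirroring handling:

import AVFoundation
import CoreImage

/// Crops a video frame to the face, using the untransformed
/// AVMetadataFaceObject.bounds (normalized 0–1, metadata-output space).
func cropFace(from sampleBuffer: CMSampleBuffer,
              metadataBounds: CGRect,
              videoOutput: AVCaptureVideoDataOutput) -> CIImage? {
    guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return nil }
    let ciImage = CIImage(cvImageBuffer: imageBuffer)

    // Normalized metadata rect -> pixel rect of this output (top-left origin).
    let pixelRect = videoOutput.outputRectConverted(fromMetadataOutputRect: metadataBounds)

    // Core Image's origin is bottom-left, so flip the rect vertically.
    let extent = ciImage.extent
    let cropRect = CGRect(x: pixelRect.origin.x,
                          y: extent.height - pixelRect.maxY,
                          width: pixelRect.width,
                          height: pixelRect.height)
    return ciImage.cropped(to: cropRect)
}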

1 Answer


Hello, here are some suggestions:

1. Add a stillImageOutPut:

lazy var stillImageOutPut: AVCaptureStillImageOutput = {
    let imageOutPut = AVCaptureStillImageOutput()
    return imageOutPut
}()
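
(Side note: AVCaptureStillImageOutput is deprecated since iOS 10. The steps below keep it because it still works, but if you can target iOS 11+ you might prefer AVCapturePhotoOutput. Here is a rough sketch of the equivalent capture, with PhotoCaptor as a placeholder name:)

import AVFoundation
import UIKit

final class PhotoCaptor: NSObject, AVCapturePhotoCaptureDelegate {
    // Add this output to the session exactly like stillImageOutPut in step 2.
    let photoOutput = AVCapturePhotoOutput()

    /// Call this when a face is detected.
    func capture() {
        photoOutput.capturePhoto(with: AVCapturePhotoSettings(), delegate: self)
    }

    func photoOutput(_ output: AVCapturePhotoOutput,
                     didFinishProcessingPhoto photo: AVCapturePhoto,
                     error: Error?) {
        guard error == nil,
              let data = photo.fileDataRepresentation(),
              let image = UIImage(data: data) else { return }
        print(image) // operate on your image here
    }
}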

2. Add it to the session:

if session.canAddOutput(stillImageOutPut) {
    session.addOutput(stillImageOutPut)
}
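
This assumes you already have a running AVCaptureSession with the camera input and an AVCaptureMetadataOutput configured for faces (that is what drives the delegate in step 3). A minimal sketch of that setup; the function name and queue choice are only placeholders:

import AVFoundation

func makeFaceDetectionSession(delegate: AVCaptureMetadataOutputObjectsDelegate) -> AVCaptureSession? {
    let session = AVCaptureSession()

    guard let camera = AVCaptureDevice.default(for: .video),
          let input = try? AVCaptureDeviceInput(device: camera),
          session.canAddInput(input) else { return nil }
    session.addInput(input)

    // Face metadata drives metadataOutput(_:didOutput:from:) in step 3.
    let metadataOutput = AVCaptureMetadataOutput()
    guard session.canAddOutput(metadataOutput) else { return nil }
    session.addOutput(metadataOutput)
    metadataOutput.setMetadataObjectsDelegate(delegate, queue: .main)
    // metadataObjectTypes must be set after the output is added to the session.
    metadataOutput.metadataObjectTypes = [.face]

    return session
}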

3. Then implement this delegate function:

// MARK: AVCaptureMetadataOutputObjectsDelegate

extension ZHFaceDetectorViewController: AVCaptureMetadataOutputObjectsDelegate {

    func metadataOutput(_ output: AVCaptureMetadataOutput, didOutput metadataObjects: [AVMetadataObject], from connection: AVCaptureConnection) {
        printLog(Thread.current)
        guard let object = metadataObjects.first else { return }
        // Capture only once per detection.
        if !hasDetectorFace && object.type == AVMetadataObject.ObjectType.face {
            hasDetectorFace = true
            DispatchQueue.global().async {
                if let stillImageConnection = self.stillImageOutPut.connection(with: AVMediaType.video) {
                    printLog(stillImageConnection)
                    printLog(connection)
                    // Only raw values 1...4 map to a valid AVCaptureVideoOrientation,
                    // so do not force-unwrap (e.g. .faceUp has no counterpart).
                    if let orientation = AVCaptureVideoOrientation(rawValue: UIDevice.current.orientation.rawValue) {
                        stillImageConnection.videoOrientation = orientation
                    }
                    /// prepare settings; without them the screen flashes white while the photo is captured
                    let settings = AVCaptureAutoExposureBracketedStillImageSettings.autoExposureSettings(exposureTargetBias: AVCaptureDevice.currentExposureTargetBias)
                    /// begin capture
                    self.stillImageOutPut.prepareToCaptureStillImageBracket(from: stillImageConnection, withSettingsArray: [settings], completionHandler: { (complete, error) in
                        if error == nil {
                            self.stillImageOutPut.captureStillImageAsynchronously(from: stillImageConnection, completionHandler: { (imageDataSampleBuffer, error) in
                                printLog(imageDataSampleBuffer)
                                printLog(error)
                                if error == nil {
                                    if let sampleBuffer = imageDataSampleBuffer,
                                       let imageData = AVCaptureStillImageOutput.jpegStillImageNSDataRepresentation(sampleBuffer),
                                       let image = UIImage(data: imageData) {
                                        /// operate on your image
                                        printLog(image)
                                    }
                                } else {
                                    printLog("something went wrong")
                                }
                            })
                        }
                    })
                }
            }
        }
    }
}

4. Then I get the captured picture in my log output.
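
The question asks for just the face area, so after step 3 you still need to crop the UIImage to the face rect. A rough sketch of one way to do it (cropToFace is just a placeholder helper; faceBounds is assumed to be the raw AVMetadataFaceObject.bounds you saved in the metadata callback, and rotation/mirroring handling is left out):

import AVFoundation
import UIKit

/// Crops a captured still image to the detected face.
/// `faceBounds` is the untransformed AVMetadataFaceObject.bounds
/// (normalized 0–1, metadata-output coordinate space).
func cropToFace(_ image: UIImage, faceBounds: CGRect,
                stillImageOutput: AVCaptureStillImageOutput) -> UIImage? {
    guard let cgImage = image.cgImage else { return nil }
    // Map the normalized metadata rect into this output's pixel coordinates.
    let pixelRect = stillImageOutput.outputRectConverted(fromMetadataOutputRect: faceBounds)
    guard let croppedCGImage = cgImage.cropping(to: pixelRect) else { return nil }
    return UIImage(cgImage: croppedCGImage, scale: image.scale, orientation: image.imageOrientation)
}

outputRectConverted(fromMetadataOutputRect:) does the normalized-to-pixel mapping for you, so you do not have to hand-roll the scaling.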
