In my project, I need to capture frames while the video is being recorded. I grab them in the delegate method
"- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection"
and use "appendPixelBuffer:newPixBuffer withPresentationTime:tmpTime" to save the modified frame. That part works. But after I finish a recording, the AVAssetWriter's status changes to completed, yet the MP4 file is not saved.
Here is some of my code:
1. Configuration:
-(void)configRecording:(NSURL *)fileUrl
{
    NSError *error;
    _recordingFilePath = fileUrl;

    NSDictionary *videoCleanApertureSettings = @{
        AVVideoCleanApertureWidthKey: [NSNumber numberWithFloat:SCREEN_WIDTH],
        AVVideoCleanApertureHeightKey: [NSNumber numberWithFloat:SCREEN_WIDTH],
        AVVideoCleanApertureHorizontalOffsetKey: [NSNumber numberWithInt:10],
        AVVideoCleanApertureVerticalOffsetKey: [NSNumber numberWithInt:10],
    };
    NSDictionary *videoCompressionSettings = @{
        AVVideoAverageBitRateKey: [NSNumber numberWithFloat:2000000.0],
        AVVideoMaxKeyFrameIntervalKey: [NSNumber numberWithInteger:1],
        AVVideoProfileLevelKey: AVVideoProfileLevelH264Baseline30,
        AVVideoCleanApertureKey: videoCleanApertureSettings,
    };
    NSDictionary *videoSetting = [NSDictionary dictionaryWithObjectsAndKeys:
                                  AVVideoCodecH264, AVVideoCodecKey,
                                  [NSNumber numberWithInt:SCREEN_WIDTH], AVVideoWidthKey,
                                  [NSNumber numberWithInt:SCREEN_WIDTH], AVVideoHeightKey,
                                  videoCompressionSettings, AVVideoCompressionPropertiesKey,
                                  AVVideoScalingModeResizeAspectFill, AVVideoScalingModeKey,
                                  nil];

    _avassetWriter = [AVAssetWriter assetWriterWithURL:fileUrl fileType:AVFileTypeMPEG4 error:&error];

    // Add the audio input
    AudioChannelLayout acl;
    bzero(&acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;
    NSDictionary *audioOutputSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                         [NSNumber numberWithInt:kAudioFormatAppleLossless], AVFormatIDKey,
                                         [NSNumber numberWithInt:16], AVEncoderBitDepthHintKey,
                                         [NSNumber numberWithFloat:44100.0], AVSampleRateKey,
                                         [NSNumber numberWithInt:1], AVNumberOfChannelsKey,
                                         [NSData dataWithBytes:&acl length:sizeof(acl)], AVChannelLayoutKey,
                                         nil];

    _avassetWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:videoSetting];
    _avassetWriterInput.expectsMediaDataInRealTime = YES;

    _audioWriterInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioOutputSettings];
    _audioWriterInput.expectsMediaDataInRealTime = YES;

    _avassetAdaptor = [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_avassetWriterInput
                                                                               sourcePixelBufferAttributes:videoSetting];

    if ([_avassetWriter canAddInput:_avassetWriterInput]) {
        [_avassetWriter addInput:_avassetWriterInput];
    }
    if ([_avassetWriter canAddInput:_audioWriterInput]) {
        [_avassetWriter addInput:_audioWriterInput];
    }

    BOOL canOutput = [_avassetWriter canApplyOutputSettings:videoSetting forMediaType:AVFileTypeMPEG4];
    NSLog(@"can out put := %d", canOutput);
}
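(Note: the adaptor above is created with the AVVideo* output settings dictionary as its sourcePixelBufferAttributes. A pixel-buffer-attributes dictionary built from CoreVideo keys would look roughly like the sketch below; this is only an alternative shown for comparison, assuming 32BGRA frames, not the code I actually run.)

// Sketch only: CoreVideo-style attributes for the pixel buffer adaptor
// (assumes 32BGRA frames; shown for comparison with the settings used above).
NSDictionary *pixelBufferAttributes = @{
    (id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
    (id)kCVPixelBufferWidthKey: @(SCREEN_WIDTH),
    (id)kCVPixelBufferHeightKey: @(SCREEN_WIDTH)
};
_avassetAdaptor = [AVAssetWriterInputPixelBufferAdaptor
    assetWriterInputPixelBufferAdaptorWithAssetWriterInput:_avassetWriterInput
                               sourcePixelBufferAttributes:pixelBufferAttributes];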
2. This is my start recording function:
[_avassetWriter startWriting];
[_avassetWriter startSessionAtSourceTime:kCMTimeZero];
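(For reference only: since the delegate below appends frames with their capture presentation timestamps, the session could also be started lazily at the first sample's timestamp instead of kCMTimeZero. This is just a sketch, not what my code currently does; firstSampleBuffer is a hypothetical name for the first buffer seen by the delegate.)

// Sketch: start the session at the first captured frame's timestamp
// (firstSampleBuffer is hypothetical; this would run inside the capture delegate).
if (_avassetWriter.status == AVAssetWriterStatusUnknown) {
    [_avassetWriter startWriting];
    CMTime firstPTS = CMSampleBufferGetPresentationTimeStamp(firstSampleBuffer);
    [_avassetWriter startSessionAtSourceTime:firstPTS];
}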
3. This is how I modify the frame:
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    int faceTrackResult = [faceTracker startFacialTracking:pixelBuffer
                                         startFaceTracking:YES
                                         withFaceRectangle:_faceRect
                                             withRollAngle:_rollAngle];
    CVPixelBufferRetain(pixelBuffer);
    dispatch_async(dispatch_get_main_queue(), ^{
        [_trackerView drawSceneWithBackground:pixelBuffer];
        CVPixelBufferRelease(pixelBuffer);
    });

    if (cameraView.isRecording == YES) {
        CMTime tmpTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        [cameraView getLastRecordingTime:tmpTime];
        // Start capturing the video frame
        CGImageRef curFrameRef = [cameraView getImageRef:self.trackerView.frame];
        CVPixelBufferRef newPixBuffer = [self pixelBufferFromCGImage:curFrameRef];
        if (cameraView.avassetWriter.status == AVAssetWriterStatusWriting && cameraView.isRecording == YES) {
            BOOL result = [cameraView.avassetAdaptor appendPixelBuffer:newPixBuffer withPresentationTime:tmpTime];
            NSLog(@"result := %d; status := %ld", result, (long)cameraView.avassetWriter.status);
        }
    }
}
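(pixelBufferFromCGImage: is not shown here. A typical implementation of such a helper, assuming a 32ARGB buffer drawn with Core Graphics, looks roughly like the sketch below; my actual helper may differ.)

// Sketch of a typical pixelBufferFromCGImage: helper (assumes 32ARGB output);
// the caller owns the returned buffer and should CVPixelBufferRelease it.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image
{
    size_t width  = CGImageGetWidth(image);
    size_t height = CGImageGetHeight(image);

    NSDictionary *options = @{ (id)kCVPixelBufferCGImageCompatibilityKey: @YES,
                               (id)kCVPixelBufferCGBitmapContextCompatibilityKey: @YES };
    CVPixelBufferRef pxBuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options, &pxBuffer);
    if (status != kCVReturnSuccess) {
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxBuffer, 0);
    void *baseAddress = CVPixelBufferGetBaseAddress(pxBuffer);

    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(baseAddress, width, height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxBuffer),
                                                 colorSpace,
                                                 (CGBitmapInfo)kCGImageAlphaNoneSkipFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), image);

    CGContextRelease(context);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(pxBuffer, 0);
    return pxBuffer;
}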
4. This is my stop recording function:
-(void)stopRecording
{
    WS(weakSelf);
    [_avassetWriterInput markAsFinished];
    [_avassetWriter endSessionAtSourceTime:lastSimple_Time];
    if (_avassetWriter.status != AVAssetWriterStatusCompleted) {
        NSLog(@"error := %@", _avassetWriter.error);
    }
    self.isRecording = NO;
    [_avassetWriter finishWritingWithCompletionHandler:^{
        if (_avassetWriter.status == AVAssetWriterStatusFailed) {
            NSLog(@"error := %@", _avassetWriter.error);
        }
        if (weakSelf.recordDelegate && [weakSelf.recordDelegate respondsToSelector:@selector(finishRecording:)]) {
            [weakSelf.recordDelegate finishRecording:weakSelf.recordingFilePath];
        }
    }];
}
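(For diagnosis, the completion handler can also log the writer's final status and check whether anything exists at the output URL. A minimal sketch, reusing _avassetWriter, weakSelf and recordingFilePath from the code above:)

// Sketch: diagnostic logging inside finishWritingWithCompletionHandler
// (uses _avassetWriter and recordingFilePath from the code above).
[_avassetWriter finishWritingWithCompletionHandler:^{
    NSLog(@"final status := %ld, error := %@",
          (long)_avassetWriter.status, _avassetWriter.error);
    BOOL exists = [[NSFileManager defaultManager]
                   fileExistsAtPath:weakSelf.recordingFilePath.path];
    NSLog(@"file exists := %d at %@", exists, weakSelf.recordingFilePath.path);
}];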