I have read every post I can find on the internet about this, and I have had some success creating the video file, but three problems remain that no one seems to have mentioned:
1. The video doesn't play correctly in some players. In QuickTime on Windows it plays only one frame and then the screen turns white, and the video can't be played on YouTube at all.
2. Some images, for some reason, come out looking very distorted: http://lh3.googleusercontent.com/-Jyz-L1k3MEk/TjpfSfKf8LI/AAAAAAAADBs/D1GYuEqI-Oo/h301/1.JPG (I'm a new user, so I'm not allowed to embed the image in the post.)
3. For some images the orientation is wrong, even though I transform the context according to the image's orientation.
Can someone help me with this, please? Thank you very much in advance!
Here is my code:
1. This function creates the video from a UIImage. I only use one image and one audio file (.caf), and I want to show that image while the audio plays.
- (void)writeImageAndAudioAsMovie:(UIImage *)image andAudio:(NSString *)audioFilePath duration:(int)duration {
    NSLog(@"start make movie: length:%d", duration);

    // Remove any previous output file before the writer starts.
    if ([[NSFileManager defaultManager] fileExistsAtPath:ImageVideoPath])
        [[NSFileManager defaultManager] removeItemAtPath:ImageVideoPath error:nil];

    NSError *error = nil;
    AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:ImageVideoPath]
                                                           fileType:AVFileTypeMPEG4
                                                              error:&error];
    NSParameterAssert(videoWriter);

    NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                                   AVVideoCodecH264, AVVideoCodecKey,
                                   [NSNumber numberWithInt:image.size.width], AVVideoWidthKey,
                                   [NSNumber numberWithInt:image.size.height], AVVideoHeightKey,
                                   nil];

    AVAssetWriterInput *writerInput = [[AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo
                                                                          outputSettings:videoSettings] retain];
    AVAssetWriterInputPixelBufferAdaptor *adaptor =
        [AVAssetWriterInputPixelBufferAdaptor assetWriterInputPixelBufferAdaptorWithAssetWriterInput:writerInput
                                                                          sourcePixelBufferAttributes:nil];
    NSParameterAssert(writerInput);
    NSParameterAssert([videoWriter canAddInput:writerInput]);

    writerInput.expectsMediaDataInRealTime = YES;
    [videoWriter setShouldOptimizeForNetworkUse:YES];
    [videoWriter addInput:writerInput];

    // Start a session:
    [videoWriter startWriting];
    [videoWriter startSessionAtSourceTime:kCMTimeZero];

    // Write samples: a single frame at time zero.
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];
    CVPixelBufferRelease(buffer); // the buffer is returned with a +1 retain count

    // Finish the session:
    [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 1)];
    [writerInput markAsFinished];
    [videoWriter finishWriting];

    CVPixelBufferPoolRelease(adaptor.pixelBufferPool);
    [videoWriter release];
    [writerInput release];

    [self addAudioToFileAtPath:ImageVideoPath andAudioPath:audioFilePath];
}
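One thing I suspect about problem 1: since I only ever append a single pixel buffer at time zero, maybe some players stop after that one sample instead of holding it for the full duration. This is just a sketch of what I'm considering trying (appending the same frame again at the end of the clip is my own guess, not something I've verified):

    // Sketch only: write the still frame twice, once at the start and once near
    // the end of the clip, so the single image spans the full duration.
    CVPixelBufferRef buffer = [self pixelBufferFromCGImage:image.CGImage];
    [adaptor appendPixelBuffer:buffer withPresentationTime:kCMTimeZero];

    // Wait until the writer input can accept more data before the second append.
    while (!writerInput.readyForMoreMediaData) {
        [NSThread sleepForTimeInterval:0.05];
    }
    [adaptor appendPixelBuffer:buffer withPresentationTime:CMTimeMake(duration, 1)];
    CVPixelBufferRelease(buffer);

    [videoWriter endSessionAtSourceTime:CMTimeMake(duration, 1)];
    [writerInput markAsFinished];
    [videoWriter finishWriting];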
2. Create a CVPixelBufferRef for the video.
- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)cgimage {
    size_t width  = CGImageGetWidth(cgimage);
    size_t height = CGImageGetHeight(cgimage);

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];

    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, width, height,
                                          kCVPixelFormatType_32ARGB,
                                          (CFDictionaryRef)options, &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    // Draw the CGImage into the pixel buffer's memory.
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8, 4 * width,
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, width, height), cgimage);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
    // Note: the caller owns the returned buffer and must CVPixelBufferRelease() it.
    return pxbuffer;
}
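For the distorted frames (problem 2), my current guess is that I'm assuming each row of the pixel buffer is exactly 4 * width bytes, while Core Video may pad the rows for alignment. If that's the cause, the bitmap context would need the buffer's real stride; this is only a sketch of the change I have in mind, not something I've verified:

    // Unverified sketch: use the pixel buffer's actual bytes-per-row instead of
    // assuming 4 * width when creating the bitmap context.
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);
    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, width, height, 8, bytesPerRow,
                                                 rgbColorSpace, kCGImageAlphaNoneSkipFirst);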
3. Put the video and audio together.
- (void)addAudioToFileAtPath:(NSString *)videoPath andAudioPath:(NSString *)audioPath {
    AVMutableComposition *mixComposition = [AVMutableComposition composition];

    NSURL *audio_inputFileUrl = [NSURL fileURLWithPath:audioPath];
    NSURL *video_inputFileUrl = [NSURL fileURLWithPath:videoPath];

    NSString *outputFilePath = FinalVideoPath;
    NSURL *outputFileUrl = [NSURL fileURLWithPath:outputFilePath];
    if ([[NSFileManager defaultManager] fileExistsAtPath:outputFilePath])
        [[NSFileManager defaultManager] removeItemAtPath:outputFilePath error:nil];

    // Add the audio track to the composition.
    AVURLAsset *audioAsset = [[AVURLAsset alloc] initWithURL:audio_inputFileUrl options:nil];
    CMTimeRange audio_timeRange = CMTimeRangeMake(kCMTimeZero, audioAsset.duration);
    AVMutableCompositionTrack *b_compositionAudioTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeAudio preferredTrackID:kCMPersistentTrackID_Invalid];
    [b_compositionAudioTrack insertTimeRange:audio_timeRange
                                     ofTrack:[[audioAsset tracksWithMediaType:AVMediaTypeAudio] objectAtIndex:0]
                                      atTime:kCMTimeZero
                                       error:nil];

    // Add the video track to the composition.
    AVURLAsset *videoAsset = [[AVURLAsset alloc] initWithURL:video_inputFileUrl options:nil];
    CMTimeRange video_timeRange = CMTimeRangeMake(kCMTimeZero, videoAsset.duration);
    AVMutableCompositionTrack *a_compositionVideoTrack =
        [mixComposition addMutableTrackWithMediaType:AVMediaTypeVideo preferredTrackID:kCMPersistentTrackID_Invalid];
    [a_compositionVideoTrack insertTimeRange:video_timeRange
                                     ofTrack:[[videoAsset tracksWithMediaType:AVMediaTypeVideo] objectAtIndex:0]
                                      atTime:kCMTimeZero
                                       error:nil];
    //nextClipStartTime = CMTimeAdd(nextClipStartTime, a_timeRange.duration);

    [audioAsset release]; audioAsset = nil;
    [videoAsset release]; videoAsset = nil;

    // Export the composition to the final file.
    AVAssetExportSession *_assetExport = [[AVAssetExportSession alloc] initWithAsset:mixComposition
                                                                          presetName:AVAssetExportPresetHighestQuality];
    _assetExport.outputFileType = AVFileTypeQuickTimeMovie;
    _assetExport.outputURL = outputFileUrl;

    [_assetExport exportAsynchronouslyWithCompletionHandler:^(void) {
        switch (_assetExport.status) {
            case AVAssetExportSessionStatusCompleted:
                NSLog(@"Export Complete");
                break;
            case AVAssetExportSessionStatusFailed:
                NSLog(@"Export Failed");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            case AVAssetExportSessionStatusCancelled:
                NSLog(@"Export Cancelled");
                NSLog(@"ExportSessionError: %@", [_assetExport.error localizedDescription]);
                break;
            default:
                break;
        }
        [_assetExport release]; // balance the alloc above once the export finishes
    }];
}
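For the orientation problem (problem 3), I'm wondering whether I should redraw the UIImage into an "up"-oriented copy before taking its CGImage at all, since the raw CGImage doesn't carry UIImage's imageOrientation. The helper below is just an idea I'm considering, and the method name is my own:

    // Idea only: normalize the UIImage's orientation before using image.CGImage,
    // because the underlying CGImage ignores UIImage's imageOrientation.
    - (UIImage *)normalizedImage:(UIImage *)image {
        if (image.imageOrientation == UIImageOrientationUp)
            return image;
        UIGraphicsBeginImageContextWithOptions(image.size, NO, image.scale);
        // drawInRect: applies the orientation transform for us.
        [image drawInRect:CGRectMake(0, 0, image.size.width, image.size.height)];
        UIImage *normalized = UIGraphicsGetImageFromCurrentImageContext();
        UIGraphicsEndImageContext();
        return normalized;
    }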