I am trying to add a time-lapse feature to my iOS application. Can anybody help me with a resource or explain how to implement this? I want to capture video from my application using the Time-lapse mode that iOS 8 introduced in the Camera app. I didn't find any resources for developers. Any pointers would be great.

1 Answer

You could use AVFoundation to combine the photos into a video. You can do this with AVAssetWriter.
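The snippets below reference a few variables they never declare (videoOutputPath, imageSize, fps, imageArray, error). A minimal setup might look like this, assuming you write to the temporary directory at a 30 fps timescale (the names are illustrative):

NSArray *imageArray = self.capturedImages; // hypothetical: the UIImages you want to stitch together
CGSize imageSize = ((UIImage *)imageArray.firstObject).size;
int32_t fps = 30;                          // timescale for the CMTime values below
NSString *videoOutputPath = [NSTemporaryDirectory() stringByAppendingPathComponent:@"timelapse.mov"];
// AVAssetWriter will not overwrite an existing file, so clear the path first.
[[NSFileManager defaultManager] removeItemAtPath:videoOutputPath error:NULL];
NSError *error = nil;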

AVAssetWriter *videoWriter = [[AVAssetWriter alloc] initWithURL:[NSURL fileURLWithPath:videoOutputPath]
                                                       fileType:AVFileTypeQuickTimeMovie
                                                          error:&error];
NSParameterAssert(videoWriter);

NSDictionary *videoSettings = [NSDictionary dictionaryWithObjectsAndKeys:
                               AVVideoCodecH264, AVVideoCodecKey,
                               [NSNumber numberWithInt:imageSize.width], AVVideoWidthKey,
                               [NSNumber numberWithInt:imageSize.height], AVVideoHeightKey,
                               nil];

AVAssetWriterInput* videoWriterInput = [AVAssetWriterInput
                                        assetWriterInputWithMediaType:AVMediaTypeVideo
                                        outputSettings:videoSettings];

AVAssetWriterInputPixelBufferAdaptor *adaptor = [AVAssetWriterInputPixelBufferAdaptor
                                                 assetWriterInputPixelBufferAdaptorWithAssetWriterInput:videoWriterInput
                                                 sourcePixelBufferAttributes:nil];

NSParameterAssert(videoWriterInput);
NSParameterAssert([videoWriter canAddInput:videoWriterInput]);
videoWriterInput.expectsMediaDataInRealTime = NO; // frames come from memory, not a live capture
[videoWriter addInput:videoWriterInput];

//Start a session:
[videoWriter startWriting];
[videoWriter startSessionAtSourceTime:kCMTimeZero];

CVPixelBufferRef buffer = NULL;

int frameCount = 0;
double numberOfSecondsPerFrame = 2;                               // each photo is shown for 2 seconds of video
int64_t frameDuration = (int64_t)(fps * numberOfSecondsPerFrame); // duration of one frame, in 1/fps ticks
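With fps = 30 and numberOfSecondsPerFrame = 2, frameDuration comes out to 60 ticks on a 1/30-second timescale, so frame k is presented at CMTimeMake(k * 60, 30), i.e. at 2k seconds.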

Then append the images (as pixel buffers) one by one:

for (UIImage *img in imageArray)
{
    progress.completedUnitCount++; // optional: update an NSProgress if you track one

    buffer = [self pixelBufferFromCGImage:[img CGImage] ImageSize:imageSize];

    BOOL append_ok = NO;
    int j = 0;
    while (!append_ok && j < 30) {
        if (adaptor.assetWriterInput.readyForMoreMediaData) {
            //print out status:
            NSLog(@"Processing video frame (%d/%lu)", frameCount, (unsigned long)[imageArray count]);

            CMTime frameTime = CMTimeMake(frameCount * frameDuration, fps);
            append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
            if (!append_ok) {
                NSError *writerError = videoWriter.error;
                if (writerError != nil) {
                    NSLog(@"Unresolved error %@, %@.", writerError, [writerError userInfo]);
                }
            }
        }
        else {
            printf("adaptor not ready %d, %d\n", frameCount, j);
            [NSThread sleepForTimeInterval:0.1];
        }
        j++;
    }
    CVPixelBufferRelease(buffer); // pixelBufferFromCGImage returns a +1 reference, so release it here
    if (!append_ok) {
        printf("error appending image %d, gave up after %d tries\n", frameCount, j);
    }
    frameCount++;
}
[videoWriterInput markAsFinished];
[videoWriter finishWritingWithCompletionHandler:^{}];
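Note that finishWritingWithCompletionHandler: completes asynchronously, so any follow-up work (playing the file, saving it) belongs inside the handler rather than the empty block above. As a sketch, assuming the videoOutputPath from the setup at the top, you could save the finished movie to the camera roll with ALAssetsLibrary (the iOS 8-era photo library API):

[videoWriter finishWritingWithCompletionHandler:^{
    if (videoWriter.status == AVAssetWriterStatusCompleted) {
        ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init]; // #import <AssetsLibrary/AssetsLibrary.h>
        [library writeVideoAtPathToSavedPhotosAlbum:[NSURL fileURLWithPath:videoOutputPath]
                                    completionBlock:^(NSURL *assetURL, NSError *saveError) {
            NSLog(@"Saved time-lapse to %@ (error: %@)", assetURL, saveError);
        }];
    } else {
        NSLog(@"Writing failed: %@", videoWriter.error);
    }
}];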

Here is the helper that wraps a CGImage in a CVPixelBufferRef:

- (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image ImageSize:(CGSize)size {

    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;

    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          (size_t)size.width,
                                          (size_t)size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef) options,
                                          &pxbuffer);
    if (status != kCVReturnSuccess) {
        NSLog(@"Failed to create pixel buffer");
        return NULL;
    }

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    // Use the buffer's own bytes-per-row: CoreVideo may pad rows beyond 4 * width.
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8,
                                                 CVPixelBufferGetBytesPerRow(pxbuffer),
                                                 rgbColorSpace,
                                                 kCGImageAlphaPremultipliedFirst);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image),
                                           CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // Caller owns the returned buffer and must CVPixelBufferRelease() it.
    return pxbuffer;
}
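On the capture side: as far as I know, iOS 8 does not expose the Camera app's Time-lapse mode as a public API, so you collect the stills yourself and then feed them through the writer code above. A rough sketch using AVCaptureSession with AVCaptureStillImageOutput (the iOS 8-era still image API), grabbing one frame every 2 seconds; self.session, self.stillOutput and self.capturedImages are hypothetical properties:

- (void)startTimeLapseCapture {
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetPhoto;

    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    NSError *inputError = nil;
    AVCaptureDeviceInput *input = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&inputError];
    if (input) [self.session addInput:input];

    self.stillOutput = [[AVCaptureStillImageOutput alloc] init];
    self.stillOutput.outputSettings = @{AVVideoCodecKey : AVVideoCodecJPEG};
    [self.session addOutput:self.stillOutput];
    [self.session startRunning];

    self.capturedImages = [NSMutableArray array];
    // One still every 2 seconds; this interval sets the time-lapse speed-up.
    [NSTimer scheduledTimerWithTimeInterval:2.0
                                     target:self
                                   selector:@selector(captureFrame)
                                   userInfo:nil
                                    repeats:YES];
}

- (void)captureFrame {
    AVCaptureConnection *connection = [self.stillOutput connectionWithMediaType:AVMediaTypeVideo];
    [self.stillOutput captureStillImageAsynchronouslyFromConnection:connection
                                                  completionHandler:^(CMSampleBufferRef sampleBuffer, NSError *captureError) {
        if (sampleBuffer != NULL) {
            NSData *jpegData = [AVCaptureStillImageOutput jpegStillImageNSDataRepresentation:sampleBuffer];
            UIImage *frame = [UIImage imageWithData:jpegData];
            if (frame) [self.capturedImages addObject:frame];
        }
    }];
}

Once you stop the timer and the session, pass self.capturedImages to the writer code above as imageArray.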