Long time lurker, first time poster.
I'm trying to decode a video and save each frame as an image. (The video is one I previously recorded that plays fine on the iPhone.) My first step is to make sure I'm getting correct image data, so this little snippet is only to show it in a UIImageView:
// Displays one decoded video frame in the image view.
// Must be invoked on the main thread (UIKit is not thread-safe).
- (void)showFrame:(UIImage *)frame {
    [self.videoIV setImage:frame];
}
// Decodes the video file at fileName frame-by-frame with AVAssetReader and
// pushes each frame to the UI via -showFrame:. Intended to run on a
// background thread; manages its own autorelease pool (MRC-style code).
// @param fileName Absolute path to a local video file.
- (void)unarchiveThread:(NSString *)fileName {
    NSAutoreleasePool *pool = [[NSAutoreleasePool alloc] init];

    AVURLAsset *asset = [[[AVURLAsset alloc] initWithURL:[NSURL fileURLWithPath:fileName]
                                                 options:nil] autorelease];
    if (asset == nil) {
        NSLog(@"Couldn't load avasset from '%@'\n", fileName);
        [pool drain];   // BUG FIX: the early return previously leaked the pool
        return;
    }

    // BUG FIX: was error:NULL with no result check — the failure reason was lost.
    NSError *error = nil;
    AVAssetReader *reader = [[[AVAssetReader alloc] initWithAsset:asset error:&error] autorelease];
    if (reader == nil) {
        NSLog(@"Couldn't create AVAssetReader for '%@': %@", fileName, error);
        [pool drain];
        return;
    }

    // BUG FIX: objectAtIndex:0 crashes when the asset has no video track.
    NSArray *videoTracks = [asset tracksWithMediaType:AVMediaTypeVideo];
    if ([videoTracks count] == 0) {
        NSLog(@"No video track in '%@'", fileName);
        [pool drain];
        return;
    }
    AVAssetTrack *videoTrack = [videoTracks objectAtIndex:0];
    CGAffineTransform transform = videoTrack.preferredTransform;

    // Request BGRA pixel buffers so Core Image can consume them directly.
    NSDictionary *outputSettings =
        [NSDictionary dictionaryWithObject:@(kCVPixelFormatType_32BGRA)
                                    forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    AVAssetReaderTrackOutput *trackOutput =
        [[[AVAssetReaderTrackOutput alloc] initWithTrack:videoTrack
                                          outputSettings:outputSettings] autorelease];
    [reader addOutput:trackOutput];
    [reader startReading];

    while ([reader status] == AVAssetReaderStatusReading) {
        // Per-frame pool: without it, every frame's autoreleased temporaries
        // (CIImage, UIImage, ...) pile up in the outer pool until the whole
        // video has been decoded.
        NSAutoreleasePool *framePool = [[NSAutoreleasePool alloc] init];
        CMSampleBufferRef buffer = [trackOutput copyNextSampleBuffer];
        if (buffer != NULL) {
            CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(buffer);
            // NOTE(review): +imageWithCVPixelBuffer: requires Core Image
            // support for this pixel buffer / OS version — verify the
            // deployment target; a nil here is why the original logged (null).
            CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
            if (ciImage != nil) {
                ciImage = [ciImage imageByApplyingTransform:transform];
                UIImage *image = [UIImage imageWithCIImage:ciImage];
                // UIKit may only be touched on the main thread.
                [self performSelectorOnMainThread:@selector(showFrame:)
                                       withObject:image
                                    waitUntilDone:YES];
            } else {
                NSLog(@"imageWithCVPixelBuffer: returned nil for a frame of '%@'", fileName);
            }
            CMSampleBufferInvalidate(buffer);
            CFRelease(buffer);   // balances the +1 from copyNextSampleBuffer
            buffer = NULL;       // CF types use NULL, not nil
        }
        [framePool drain];
    }
    [pool drain];
}
The first two steps work fine (I get a valid sample buffer and pixel buffer), but CIImage's imageWithCVPixelBuffer:
isn't working for some reason — I get (null). Here's my output:
buffer=CMSampleBuffer 0xba6e2d0 retainCount: 1 allocator: 0x4b43e7
invalid = NO
dataReady = YES
makeDataReadyCallback = 0x0
makeDataReadyRefcon = 0x0
formatDescription = <CMVideoFormatDescription 0xba6e260 [0x212eb48]> {
mediaType:'vide'
mediaSubType:'BGRA'
mediaSpecific: {
codecType: 'BGRA' dimensions: 1920 x 1080
}
extensions: {<CFBasicHash 0xba6e2a0 [0x212eb48]>{type = immutable dict, count = 5,
entries =>
2 : <CFString 0x4c0024 [0x212eb48]>{contents = "Version"} = <CFNumber 0xb8682b0 [0x212eb48]>{value = +2, type = kCFNumberSInt32Type}
3 : <CFString 0x4bffe4 [0x212eb48]>{contents = "CVBytesPerRow"} = <CFNumber 0xba6e1f0 [0x212eb48]>{value = +7680, type = kCFNumberSInt32Type}
4 : <CFString 0x466f1c [0x212eb48]>{contents = "CVImageBufferYCbCrMatrix"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
5 : <CFString 0x466f5c [0x212eb48]>{contents = "CVImageBufferColorPrimaries"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
6 : <CFString 0x466f8c [0x212eb48]>{contents = "CVImageBufferTransferFunction"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
}
}
}
sbufToTrackReadiness = 0x0
numSamples = 1
sampleTimingArray[1] = {
{PTS = {0/600 = 0.000}, DTS = {INVALID}, duration = {INVALID}},
}
imageBuffer = 0xba6dff0
pixel buffer=<CVPixelBuffer 0xba6dff0 width=1920 height=1080 bytesPerRow=7680 pixelFormat=BGRA attributes=<CFBasicHash 0xba6de30 [0x212eb48]>{type = immutable dict, count = 3,
entries =>
0 : <CFString 0x466cdc [0x212eb48]>{contents = "Height"} = <CFNumber 0x8e70ec0 [0x212eb48]>{value = +1080, type = kCFNumberSInt32Type}
1 : <CFString 0x466ccc [0x212eb48]>{contents = "Width"} = <CFNumber 0x8e70eb0 [0x212eb48]>{value = +1920, type = kCFNumberSInt32Type}
2 : <CFString 0x46707c [0x212eb48]>{contents = "PixelFormatType"} = <CFNumber 0xbb89710 [0x212eb48]>{value = +1111970369, type = kCFNumberSInt32Type}
}
propagatedAttachments=<CFBasicHash 0xba6e110 [0x212eb48]>{type = mutable dict, count = 3,
entries =>
0 : <CFString 0x466f1c [0x212eb48]>{contents = "CVImageBufferYCbCrMatrix"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
1 : <CFString 0x466f8c [0x212eb48]>{contents = "CVImageBufferTransferFunction"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
2 : <CFString 0x466f5c [0x212eb48]>{contents = "CVImageBufferColorPrimaries"} = <CFString 0x466f2c [0x212eb48]>{contents = "ITU_R_709_2"}
}
nonPropagatedAttachments=<CFBasicHash 0xba6dce0 [0x212eb48]>{type = mutable dict, count = 0,
entries =>
}
iosurface=0x0>
cImage = (null)
transformed cImage = (null)
image=<UIImage: 0x8c669d0>
Anyone have any idea what I'm doing wrong? I've searched Stack Overflow and found lots of examples where this works (from a live feed), but nothing that uses an AVAssetReader and then converts the buffer from copyNextSampleBuffer to a CIImage.