I'm an OS X programming newbie. I need to take a series of NSImages arriving from the internet at varying intervals and build a movie file from them progressively. The images can arrive anywhere from a few milliseconds to about 3 seconds apart.
Is the code below the correct way to do this with QTKit, and how should I calculate the frame duration when frames come in at effectively random times? (I sketched one idea for variable durations after the code.)
I'm also wondering about memory usage: will QTKit try to keep the whole movie in memory before writing it out at the end?
Here is code I pieced together from the web:
#import <QTKit/QTKit.h>

-(void)startRecording
{
    // Attributes for -addImage:forDuration:withAttributes::
    // encode each frame with the MPEG-4 video codec ("mp4v") at high quality.
    NSDictionary *imageAttributes = [NSDictionary dictionaryWithObjectsAndKeys:
                                         @"mp4v", QTAddImageCodecType,
                                         [NSNumber numberWithLong:codecHighQuality], QTAddImageCodecQuality,
                                         nil];

    // Fixed duration of 1/3 of a second per frame.
    long long timeValue = 1;
    long timeScale = 3;
    QTTime duration = QTMakeTime(timeValue, timeScale);

    // Create an empty, editable movie backed by a temporary scratch file.
    QTMovie *movie = [[QTMovie alloc] initToWritableFile:@"tmpfps.tmp" error:NULL];

    self.mMovie = movie;
    self.duration = duration;
    self.myDict = imageAttributes;
}

-(void)addFrame:(NSImage *)imageFrame
{
    // Append the image as one frame with the fixed 1/3 s duration.
    [self.mMovie addImage:imageFrame
              forDuration:self.duration
           withAttributes:self.myDict];
}

-(void)stopRecording
{
    // Flatten the movie (self-contained, no file references) into the final file.
    NSDictionary *writeAttributes = [NSDictionary dictionaryWithObject:[NSNumber numberWithBool:YES]
                                                                 forKey:QTMovieFlatten];
    [self.mMovie writeToFile:@"my.mov" withAttributes:writeAttributes];
    self.mMovie = nil;
}