I want to record snapshots of a window to an mp4 file in a macOS app. My steps are:

  1. Start the creator with [aMp4CreaterEntity startRecordWithSize:CGSizeMake(2320, 1080) pixelType:kCVPixelFormatType_32ARGB].
  2. Run a timer 15 times per second; on each tick, snapshot the window with CGWindowListCreateImage to get a CGImageRef whose width is 2320 and height is 1080.
  3. Call [aMp4CreaterEntity recordImage:theCGImageRef timeStamp:[[NSDate date] timeIntervalSince1970]] to send the CGImageRef to my aMp4CreaterEntity (a simplified sketch of this calling code follows the list).
  4. Call [aMp4CreaterEntity stopRecord], and get the mp4 file at videoOutputPath (a file in NSTemporaryDirectory()).
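
For context, this is roughly the calling side (a simplified sketch, not my exact code; snapshotTimer, targetWindowID, and aMp4CreaterEntity are placeholder names for my own properties, and the block-based NSTimer needs macOS 10.12+):

   self.snapshotTimer = [NSTimer scheduledTimerWithTimeInterval:1.0 / 15.0 repeats:YES block:^(NSTimer *timer) {
       // Snapshot the target window; CGRectNull captures the window's full bounds
       CGImageRef image = CGWindowListCreateImage(CGRectNull,
                                                  kCGWindowListOptionIncludingWindow,
                                                  self.targetWindowID,
                                                  kCGWindowImageBoundsIgnoreFraming);
       if (image != NULL) {
           [self.aMp4CreaterEntity recordImage:image timeStamp:[[NSDate date] timeIntervalSince1970]];
           CGImageRelease(image);
       }
   }];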

Everything runs fine, except that the resulting mp4 contains only the first half of what was sent; the later content is lost. Yet when I debug, every CVPixelBufferRef is appended by the AVAssetWriterInputPixelBufferAdaptor.

At first I thought the CMTime setting was wrong (see the snippet below), but after changing it to half or double the value, the problem persists.
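
For reference, the presentation timestamp inside recordImage:timeStamp: is computed from the wall-clock timestamps with a timescale of 1000 (millisecond precision), so at 15 fps consecutive frames should land roughly 66-67 ms apart:

   // ts and self.startTs are NSTimeInterval values in seconds.
   // Example: the second frame at 15 fps arrives ~0.067 s after the first,
   // so (ts - self.startTs) * 1000 ≈ 67 and the PTS becomes CMTimeMake(67, 1000).
   CMTime cmTime = CMTimeMake((ts - self.startTs) * 1000, 1000);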

I'm new to audio and video programming. Can someone help me solve this problem or explain it in detail?

BTW: I record the audio to a separate file at the same time, and it has the same problem - only the earlier half of the content is there. Also, I can read Swift code directly, so answers in Swift are welcome.

This is my recorder sample code, in Objective-C.

#import "Mp4Creater.h"
#import <AVFoundation/AVFoundation.h>

@interface Mp4Creater()

@property (nonatomic, strong) AVAssetWriter *videoWriter;
@property (nonatomic, strong) AVAssetWriterInput *videoInput;
@property (nonatomic, strong) AVAssetWriterInputPixelBufferAdaptor *videoAdaptor;
@property (nonatomic, strong) NSString *videoOutputPath;
@property (nonatomic, strong) NSDictionary *videoSettings;

@property (nonatomic, assign) NSTimeInterval startTs;
@property (nonatomic, assign) NSTimeInterval latestTs;

@property (nonatomic, strong) NSOperationQueue *opQueue;

@property (nonatomic, assign) BOOL isRecording;
@property (nonatomic, assign) NSUInteger frameRate; // 15
@property (nonatomic, assign) NSUInteger iFrameInterval; // 3s

@end

@implementation Mp4Creater

- (instancetype)init
{
   self = [super init];
   if (self) {
       _videoWriter = nil;
       _videoInput = nil;
       _videoAdaptor = nil;
       _videoOutputPath = nil;
       _videoSettings = nil;
       _startTs = -1;
       _latestTs = -1;
       _isRecording = NO;
       _frameRate = 15;
       _iFrameInterval = 3;
   }
   return self;
}

- (void)dealloc
{
   [_opQueue cancelAllOperations];
}

- (BOOL)addVideoInputWithSize:(CGSize)size pixelType:(UInt32)pixelType {
   NSString *codecKey = AVVideoCodecTypeH264;

   _videoSettings = @{
       AVVideoCodecKey : codecKey,
       AVVideoWidthKey : @((int)size.width),
       AVVideoHeightKey : @((int)size.height),
       AVVideoColorPropertiesKey : @{
           AVVideoYCbCrMatrixKey : AVVideoYCbCrMatrix_ITU_R_709_2,
           AVVideoTransferFunctionKey : AVVideoTransferFunction_ITU_R_709_2,
           AVVideoColorPrimariesKey : AVVideoColorPrimaries_ITU_R_709_2,
       },
       AVVideoCompressionPropertiesKey : @{
           AVVideoAverageBitRateKey : @((int)(size.width * size.height * 2)),       // ~5 Mbps for 2320x1080
           AVVideoMaxKeyFrameIntervalKey : @((int)(_frameRate * _iFrameInterval)),  // a keyframe at least every 45 frames
           AVVideoMaxKeyFrameIntervalDurationKey : @((int)_iFrameInterval),         // or every 3 seconds
           AVVideoProfileLevelKey : AVVideoProfileLevelH264BaselineAutoLevel,
       },
   };
   AVAssetWriterInput *videoInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeVideo outputSettings:_videoSettings];
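   // Tell the writer this input receives its data in real time (from the live window capture)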
   videoInput.expectsMediaDataInRealTime = YES;

   if ([_videoWriter canAddInput:videoInput]) {
       [_videoWriter addInput:videoInput];
       _videoInput = videoInput;
   }
   else {
       return NO;
   }
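   // The adaptor is what appends raw CVPixelBufferRefs to the compressed video input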
   NSDictionary *sourcePixelBufferAttributes = @{(NSString *)kCVPixelBufferPixelFormatTypeKey : @(pixelType)};
   _videoAdaptor = [[AVAssetWriterInputPixelBufferAdaptor alloc] initWithAssetWriterInput:_videoInput sourcePixelBufferAttributes:sourcePixelBufferAttributes];

   return YES;
}

- (BOOL)startRecordWithSize:(CGSize)size pixelType:(UInt32)pixelType {
   if (self.isRecording) {
       return YES;
   }
   self.startTs = -1;
   NSString *outputFile;
   NSString *guid = [[NSUUID new] UUIDString];
   NSString *fileName = [NSString stringWithFormat:@"video_%@.mp4", guid];
   outputFile = [NSTemporaryDirectory() stringByAppendingPathComponent:fileName];

   self.videoOutputPath = outputFile;

   NSError *error = nil;

   //----initialize compression engine
   self.videoWriter = [[AVAssetWriter alloc]initWithURL:[NSURL fileURLWithPath:self.videoOutputPath]
                                               fileType:AVFileTypeMPEG4
                                                  error:&error];
   self.videoWriter.shouldOptimizeForNetworkUse = YES;

   if(error) {
       return NO;
   }
   if (self.videoWriter == nil) {
       return NO;
   }
   if (![self addVideoInputWithSize:size pixelType:pixelType ]) {
       [self stopRecord];
       return NO;
   }

   self->_isRecording = YES;
   [self.videoWriter startWriting];
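   // The session starts at kCMTimeZero, so every frame's presentation time is relative to the first frame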
   [self.videoWriter startSessionAtSourceTime:kCMTimeZero];
   _opQueue = [[NSOperationQueue alloc] init];
   _opQueue.maxConcurrentOperationCount = 1;

   return YES;
}

- (void)stopRecord {
   if (!self.isRecording) {
       return;
   }
   [_opQueue cancelAllOperations];
   NSOperationQueue *oldQueue = _opQueue;
   _opQueue = nil;
   [oldQueue waitUntilAllOperationsAreFinished];

   if (self.videoInput != nil) {
       [self.videoInput markAsFinished];
   }
   self.videoInput = nil;
   self.videoAdaptor = nil;

   if (self.videoWriter != nil && self.videoWriter.status == AVAssetWriterStatusWriting) {
       [self.videoWriter finishWritingWithCompletionHandler:^{
           // Runs asynchronously once the writer has finalized the file
       }];
   }
   self->_isRecording = NO;
}


- (void)recordImage:(CGImageRef)image timeStamp:(NSTimeInterval)ts {

   CGImageRef retainImage = CGImageRetain(image);
   __weak __typeof__(self) weak_self = self;
   [_opQueue addOperationWithBlock:^{
       __typeof__(self) self = weak_self;
       if (!self.isRecording) {
           return;
       }
       if (self.startTs < 0) {
           self.startTs = ts;
       }
       self.latestTs = ts;
       CMTime cmTime = CMTimeMake((ts - self.startTs) * 1000, 1000);
       if (self.videoWriter != nil) {
           if (self.videoWriter.status == AVAssetWriterStatusWriting) {
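               // NOTE: if the input is not ready for more data, this frame is silently dropped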
               if (self.videoInput != nil && self.videoInput.isReadyForMoreMediaData) {
                   CVPixelBufferRef buffer = [self CVPixelBufferRefFromCGImage:retainImage];
                   if (buffer != NULL) {
                       [self.videoAdaptor appendPixelBuffer:buffer withPresentationTime:cmTime];
                       CVPixelBufferRelease(buffer);
                   }
               }
           }
       }
       CGImageRelease(retainImage);
   }];
}

- (CVPixelBufferRef)CVPixelBufferRefFromCGImage:(CGImageRef)image {
   size_t pixelsWide = CGImageGetWidth(image);
   size_t pixelsHigh = CGImageGetHeight(image);

   NSDictionary *options = @{
       (__bridge NSString *)kCVPixelBufferCGImageCompatibilityKey : @YES,
       (__bridge NSString *)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES,
   };
   CVPixelBufferRef pxbuffer = NULL;
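   // Create an empty 32ARGB pixel buffer the same size as the CGImage, then draw into it with CoreGraphics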
   CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, pixelsWide, pixelsHigh, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef)options, &pxbuffer);

   NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);
   if (status == kCVReturnSuccess) {
       CVPixelBufferLockBaseAddress(pxbuffer, 0);
       void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
       NSParameterAssert(pxdata != NULL);
       // Use the buffer's actual bytes-per-row: CVPixelBufferCreate may pad rows beyond pixelsWide * 4
       size_t bytesPerRow = CVPixelBufferGetBytesPerRow(pxbuffer);

       CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
       CGContextRef context = CGBitmapContextCreate(pxdata, pixelsWide, pixelsHigh, 8, bytesPerRow, rgbColorSpace, (CGBitmapInfo)kCGImageAlphaPremultipliedFirst);
       NSParameterAssert(context);
       CGContextConcatCTM(context, CGAffineTransformIdentity);
       CGContextDrawImage(context, CGRectMake(0, 0, pixelsWide, pixelsHigh), image);
       CVPixelBufferUnlockBaseAddress(pxbuffer, 0);
       CGColorSpaceRelease(rgbColorSpace);
       CGContextRelease(context);
       return pxbuffer;
   }
   else {
       return NULL;
   }
}

@end