0

Right now I am using the following code, but something weird is happening.

PHLivePhoto *livePhoto = .....;

NSString *fileName = [[NSProcessInfo processInfo] globallyUniqueString];
NSURL *url = [NSURL fileURLWithPath: NSTemporaryDirectory()];
url = [url URLByAppendingPathComponent: fileName];
NSData *data = [NSKeyedArchiver archivedDataWithRootObject: livePhoto];
[data writeToURL: url atomically: YES];

CKAsset *asset = [[CKAsset alloc] initWithFileURL: url];

And then I use CKModifyRecordsOperation to save that asset to iCloud, and then fetch it back.

NSData *data = [NSData dataWithContentsOfURL: fetchedAsset.fileURL];
PHLivePhoto *thePhoto = [NSKeyedUnarchiver unarchiveObjectWithData: data];
PHLivePhotoView *photoView = ......;
photoView.livePhoto = thePhoto;

Most of it works, except that when the photoView stops playing back, the photoView's image just disappears. If I long-touch it again, it plays back normally.

Why is this happening?

Li Fumin
  • 1,383
  • 2
  • 15
  • 31

1 Answer

2

It appears that NSKeyedArchiver does not serialize a live photo as expected. As one of the solutions, you should dismantle the PHLivePhoto and retrieve the video and still image separately, then upload them to iCloud:

#import <Photos/Photos.h>
#import <CloudKit/CloudKit.h>
#import <MobileCoreServices/MobileCoreServices.h>

/// Splits a PHLivePhoto into its two backing resources (still image + paired
/// video), writes each to a unique temp file, and hands the results back.
///
/// @param livePhoto The live photo to disassemble.
/// @param block Called exactly once. On success receives the decoded still
///        image, the video as an AVURLAsset, and the two temp-file URLs.
///        On failure (missing resource or write error) all four arguments
///        are nil.
+ (void)disassembleLivePhoto:(nonnull PHLivePhoto *)livePhoto completion:(void (^__nonnull)(UIImage * _Nullable stillImage, AVURLAsset * _Nullable video, NSURL * _Nullable imageURL, NSURL * _Nullable videoURL))block
{
    NSArray<PHAssetResource *> *resources = [PHAssetResource assetResourcesForLivePhoto:livePhoto];

    // Find the photo and the paired-video resources. No __block needed once
    // we use fast enumeration instead of a block-based enumerator.
    PHAssetResource *resImage = nil;
    PHAssetResource *resVideo = nil;
    for (PHAssetResource *res in resources) {
        if (res.type == PHAssetResourceTypePairedVideo) {
            resVideo = res;
        } else if (res.type == PHAssetResourceTypePhoto) {
            resImage = res;
        }
    }

    // FIX: the original proceeded even when a resource was missing, which made
    // PHAssetResourceManager fail later with an opaque error. Bail out early.
    if (resImage == nil || resVideo == nil) {
        block(nil, nil, nil, nil);
        return;
    }

    // Unique temp destinations so concurrent calls never collide.
    NSString *fileName = [[NSUUID UUID] UUIDString];
    NSURL *urlMov = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[fileName stringByAppendingPathExtension:@"mov"]]];
    NSURL *urlImg = [NSURL fileURLWithPath:[NSTemporaryDirectory() stringByAppendingPathComponent:[fileName stringByAppendingPathExtension:@"jpg"]]];

    // FIX: the original ignored both write errors and reported success anyway.
    [[PHAssetResourceManager defaultManager] writeDataForAssetResource:resVideo toFile:urlMov options:nil completionHandler:^(NSError * _Nullable videoError) {
        if (videoError != nil) {
            block(nil, nil, nil, nil);
            return;
        }
        [[PHAssetResourceManager defaultManager] writeDataForAssetResource:resImage toFile:urlImg options:nil completionHandler:^(NSError * _Nullable imageError) {
            if (imageError != nil) {
                block(nil, nil, nil, nil);
                return;
            }
            block([UIImage imageWithData:[NSData dataWithContentsOfURL:urlImg]],
                  [AVURLAsset assetWithURL:urlMov],
                  urlImg,
                  urlMov);
        }];
    }];
}

/// Uploads the two halves of a live photo (still image + video) to the public
/// CloudKit database as CKAssets on a single record.
///
/// @param urlImage Local file URL of the still image (JPEG).
/// @param urlVideo Local file URL of the paired video (MOV).
/// @param block Called once with the saved CKRecord, or nil if fetching the
///        user record ID or saving the record failed.
- (void)sendLivePhotoComponentsWithImageURL:(nonnull NSURL *)urlImage videoURL:(nonnull NSURL *)urlVideo completionBlock:(void (^__nonnull)(CKRecord * __nullable recordLivePhoto))block
{
    CKAsset *assetVideo = [[CKAsset alloc] initWithFileURL:urlVideo];
    CKAsset *assetImage = [[CKAsset alloc] initWithFileURL:urlImage];

    CKContainer *ckcContainer = [CKContainer defaultContainer];
    CKDatabase *ckdbPublic = [ckcContainer publicCloudDatabase];    // in this example I use public DB

    [ckcContainer fetchUserRecordIDWithCompletionHandler:^(CKRecordID * _Nullable ownerRecordID, NSError * _Nullable error) {
        // FIX: the original ignored this error; a nil ownerRecordID would have
        // produced a nil zoneID and an invalid record ID.
        if (ownerRecordID == nil) {
            block(nil);
            return;
        }

        CKRecordID *recordID = [[CKRecordID alloc] initWithRecordName:@"your_record_name_e.g._UUID" zoneID:ownerRecordID.zoneID];
        CKRecord *record = [[CKRecord alloc] initWithRecordType:@"your_record_type" recordID:recordID];
        record[@"your_video_asset_CK_key"] = assetVideo;
        record[@"your_image_asset_CK_key"] = assetImage;

        CKModifyRecordsOperation *op = [[CKModifyRecordsOperation alloc] initWithRecordsToSave:@[record] recordIDsToDelete:nil];
        op.modifyRecordsCompletionBlock = ^void(NSArray<CKRecord *> * _Nullable savedRecords, NSArray<CKRecordID *> * _Nullable deletedRecordIDs, NSError * _Nullable operationError) {
            // FIX: surface save failures as nil instead of silently passing
            // whatever firstObject happens to be.
            if (operationError != nil) {
                block(nil);
            } else {
                block(savedRecords.firstObject);    // Done.
            }
        };

        op.qualityOfService = NSQualityOfServiceUserInitiated;

        [ckdbPublic addOperation:op];
    }];
}

Second part (retrieval from iCloud) has a little 'trick' - you should ensure that both image and video have the same asset ID contained in metadata, otherwise iOS won't know that these two pieces (video and image) belong to one compound asset – live photo - and will fail to assemble them into single correct PHLivePhoto object (in this case, however, most probably you will get PHLivePhoto, but it will be constructed as a still photo, without animation).

The easiest way here is to extract asset ID from the video asset and then modify the image part by assigning it the same ID:

/// Reassembles a PHLivePhoto from a CKRecord holding the video and image
/// CKAssets. Rewrites the still image's metadata so its asset identifier
/// matches the video's content identifier; otherwise iOS treats the pair as
/// unrelated and builds a still-only PHLivePhoto.
///
/// NOTE(review): this does synchronous file I/O on the calling thread;
/// dispatch it off the main queue.
///
/// @param record The fetched CKRecord containing both assets.
/// @param block May be called several times with progressively complete
///        PHLivePhoto instances (see PHLivePhoto.h); called with nil on failure.
- (void)assembleLivePhotoWithCKRecord:(nonnull CKRecord *)record completion:(void (^__nullable)(PHLivePhoto * _Nullable livePhoto))block
{
    // Operational data
    CKAsset *assetVideo = record[@"your_video_asset_CK_key"];
    CKAsset *assetImage = record[@"your_image_asset_CK_key"];

    // Get video and prepare local URLs
    NSString *fileName = [[NSUUID UUID] UUIDString];
    NSString *pathVideo = [NSTemporaryDirectory() stringByAppendingPathComponent:[fileName stringByAppendingPathExtension:@"mov"]];
    NSString *pathImage = [NSTemporaryDirectory() stringByAppendingPathComponent:[fileName stringByAppendingPathExtension:@"jpg"]];
    NSURL *urlVideo = [NSURL fileURLWithPath:pathVideo];
    NSURL *urlImage = [NSURL fileURLWithPath:pathImage];

    NSData *dataVideo = [NSData dataWithContentsOfURL:assetVideo.fileURL];
    if (dataVideo == nil) {
        if (block) block(nil);
        return;
    }
    [[NSFileManager defaultManager] createFileAtPath:pathVideo contents:dataVideo attributes:nil];

    // Getting video asset ID from metadata
    NSString *metaID = nil;
    NSArray<AVMetadataItem *> *metadata = [[AVURLAsset assetWithURL:urlVideo] metadata];
    for (AVMetadataItem *md in metadata) {
        if ([md.identifier containsString:@"com.apple.quicktime.content.identifier"]) {
            metaID = (NSString *)(md.value);
            break;
        }
    }

    // FIX: the original crashed here when metaID was nil (nil value in a
    // dictionary literal). Without the identifier the pair can't be matched.
    if (metaID == nil) {
        if (block) block(nil);
        return;
    }

    // Get image
    NSData *dataImage = [NSData dataWithContentsOfURL:assetImage.fileURL];
    UIImage *image = [UIImage imageWithData:dataImage];
    CGImageRef ref = [image CGImage];
    if (ref == NULL) {
        if (block) block(nil);
        return;
    }

    // Update image's metadata to make it conform to the video metadata
    // (MakerApple key 17 carries the live-photo asset identifier).
    NSDictionary *imgMetadata = @{@"{MakerApple}": @{@"17": metaID}};
    NSMutableData *imageData = [NSMutableData new];
    CGImageDestinationRef dest = CGImageDestinationCreateWithData((CFMutableDataRef)imageData, kUTTypeJPEG, 1, nil);
    if (dest == NULL) {
        if (block) block(nil);
        return;
    }
    CGImageDestinationAddImage(dest, ref, (CFDictionaryRef)imgMetadata);
    CGImageDestinationFinalize(dest);
    // FIX: the original leaked this Core Foundation object; Create-rule
    // objects must be released explicitly under ARC.
    CFRelease(dest);

    [imageData writeToFile:pathImage atomically:YES];

    [PHLivePhoto requestLivePhotoWithResourceFileURLs:@[urlImage, urlVideo] placeholderImage:nil targetSize:CGSizeZero contentMode:PHImageContentModeAspectFit resultHandler:^(PHLivePhoto * _Nullable livePhoto, NSDictionary * _Nonnull info) {
        if (block) block(livePhoto);       // NOTE: this block may be called several times
    }];
}

The resulting block with the live photo may be called several times as per Apple's documentation (see PHLivePhoto.h for more info):

The result handler will be called multiple times to deliver new PHLivePhoto instances with increasingly more content.

Also, please keep in mind that you should add all necessary checks (there is quite a bit of them, actually) and handlers for errors etc.

degapps
  • 794
  • 5
  • 10