10

Objects larger than 5GB are supported with multi-part uploading. We have 5gb+ files in one bucket already that I'd like to move to another, under the same AWS account. When I issue the command using s3cmd:

s3cmd mv s3://BUCKET1/OBJECT1 s3://BUCKET2[/OBJECT2]

It comes back with the error:

ERROR: S3 error: 400 (InvalidRequest): The specified copy source is larger than the maximum allowable size for a copy source: 5368709120

If it's possible at all, I presume the move will incur extra bandwidth/cost. Either way, I'm trying to figure out whether large files can be moved using some sort of multi-part method.

Ben Keating
  • 211
  • 1
  • 2
  • 5

2 Answers

7

Currently, what you're trying to do can't be done in a single operation. A "move" in s3cmd maps to the API as a copy followed by a delete, and it's the copy operation that has this size restriction.

http://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectCOPY.html

You can store individual objects of up to 5 TB in Amazon S3. You create a copy of your object up to 5 GB in size in a single atomic operation using this API. However, for copying an object greater than 5 GB, you must use the multipart upload Upload Part - Copy API

http://docs.aws.amazon.com/AmazonS3/latest/dev/CopyingObjctsMPUapi.html

The examples in this section show you how to copy objects greater than 5 GB using the multipart upload API. You can copy objects less than 5 GB in a single operation.

emmdee
  • 2,187
  • 12
  • 36
  • 60
0

Here's how I accomplish this in a memory-efficient way with NodeJS.

/**
 * Copy an S3 object of any size (including > 5 GB) from one bucket to another
 * using the multipart "Upload Part - Copy" API. The copy happens server-side,
 * so nothing is buffered in this process's memory.
 *
 * @param {string} from_bucket - Source bucket name.
 * @param {string} from_key    - Source object key.
 * @param {string} to_bucket   - Destination bucket name.
 * @param {string} to_key      - Destination object key.
 * @returns {Promise<void>} Resolves when the multipart copy has completed.
 *          Rejects with the underlying AWS error if any step ultimately fails;
 *          on failure the multipart upload is aborted so incomplete parts are
 *          not retained (and billed) indefinitely.
 */
async function copyS3MP(from_bucket, from_key, to_bucket, to_key) {
    const AWS = require('aws-sdk');
    AWS.config.update({region: 'us-west-2'});

    const s3 = new AWS.S3();

    const startTime = new Date();
    const partSize = 1024 * 1024 * 10; // 10 MB chunks, except the last part
    const maxUploadTries = 3;

    // Inline sleep helper. The original called module.exports.sleep(), an
    // external name that is not guaranteed to exist where this function is used.
    const sleep = (seconds) => new Promise((r) => setTimeout(r, seconds * 1000));

    // The source object's size drives the part layout.
    const head = await s3.headObject({
        Bucket: from_bucket,
        Key: from_key
    }).promise();
    const fileSize = head.ContentLength;

    console.log('Creating multipart upload for:', to_key);

    const multipart = await s3.createMultipartUpload({
        Bucket: to_bucket,
        Key: to_key,
        ContentType: getContentType(to_key) // project-local helper
    }).promise();
    const uploadId = multipart.UploadId;
    console.log('Got upload ID', uploadId);

    const multipartMap = {
        Parts: []
    };

    // Server-side copy of one byte range, retrying transient failures.
    async function copyPart(start, partNum) {
        const end = Math.min(start + partSize, fileSize);
        const partParams = {
            Bucket: to_bucket,
            Key: to_key,
            PartNumber: String(partNum),
            UploadId: uploadId,
            // Keys can contain characters that must be URL-encoded in CopySource.
            CopySource: `${from_bucket}/${encodeURIComponent(from_key)}`,
            CopySourceRange: `bytes=${start}-${end - 1}` // range is inclusive
        };

        for (let tryNum = 1; tryNum <= maxUploadTries; tryNum++) {
            try {
                const req = s3.uploadPartCopy(partParams);
                req.on('httpUploadProgress', function(progress) {
                    console.log(Math.round(progress.loaded / progress.total * 100) + '% done');
                });
                const mData = await req.promise();

                // uploadPartCopy nests the ETag under CopyPartResult; reading
                // mData.ETag directly yields undefined and makes
                // completeMultipartUpload reject the part list.
                multipartMap.Parts[partNum - 1] = {
                    ETag: (mData.CopyPartResult || mData).ETag,
                    PartNumber: partNum
                };

                console.log('Completed part', partNum);
                console.log('mData', mData);
                return;
            } catch (e) {
                console.log('Upload part error:', e);
                if (tryNum < maxUploadTries) {
                    console.log('Retrying copy of part: #', partParams.PartNumber);
                    await sleep(1);
                } else {
                    console.log('Failed uploading part: #', partParams.PartNumber);
                    throw e;
                }
            }
        }
    }

    try {
        let partNum = 0;
        for (let start = 0; start < fileSize; start += partSize) {
            partNum++;
            console.log("Part Num: " + partNum);
            await copyPart(start, partNum);
        }

        const data = await s3.completeMultipartUpload({
            Bucket: to_bucket,
            Key: to_key,
            MultipartUpload: multipartMap,
            UploadId: uploadId
        }).promise();

        const delta = (new Date() - startTime) / 1000;
        console.log('Completed upload in', delta, 'seconds');
        console.log('Final upload data:', data);
    } catch (e) {
        console.error(e);
        // Clean up: abandoned multipart uploads keep their parts in storage
        // (and on the bill) until explicitly aborted.
        try {
            await s3.abortMultipartUpload({
                Bucket: to_bucket,
                Key: to_key,
                UploadId: uploadId
            }).promise();
        } catch (abortErr) {
            console.error('Failed to abort multipart upload:', abortErr);
        }
        throw e;
    }
}
egekhter
  • 113
  • 7