3

/**
 * Uploads the multipart file attached to the request to S3 and stores the
 * resulting public URL on an existing guide_media row.
 *
 * Expects: req.file (multer upload under the 'file' key) and req.body.id
 * (the guide_media _id to update).
 * Responds: 400 if file or id is missing, 404 if the row does not exist,
 * 200 with the updated row(s) on success. All other errors go to next(err).
 */
function uploadGuideMedia(req, res, next) {
    // Both the file and the target row id are required; the original `&&`
    // check let "id without file" through and crashed on req.file.path below.
    if (!req.file || !req.body.id) {
        // 400 Bad Request — 401 would wrongly signal an authentication failure.
        return res.status(400).send({
            status: 400,
            data: "Upload a file with 'file' key"
        });
    }
    db.query('SELECT * from guide_media where _id = $1', [req.body.id], function(err, guide) {
        if (err) return next(err);
        if (guide.rows.length === 0) {
            return res.status(404).send({
                status: 404,
                data: "Guide_media not found"
            });
        }
        // Multer stores the temp file without an extension; rename so the
        // original extension is preserved on disk before reading it back.
        let name = req.file.path + path.extname(req.file.originalname);
        fs.renameSync(req.file.path, name);
        req.file.path = name;
        // NOTE(review): the original also called s3bucket.createBucket() on
        // every request and ignored its result; dropped here — the bucket
        // must already exist for uploads to have ever succeeded.
        var s3bucket = new AWS.S3({
            params: {
                Bucket: 'mapery-v2'
            }
        });
        fs.readFile(req.file.path, function(err, lq_file) {
            if (err) return next(err); // was ignored: undefined body reached S3
            var params = {
                Key: 'upload-v2/' + req.file.originalname,
                Body: lq_file,
                ContentType: req.file.mimetype,
                ACL: 'public-read'
            };
            s3bucket.upload(params, function(err, aws_images) {
                // Always remove the temp file, whether or not the upload worked.
                fs.unlink(req.file.path, function(unlinkErr) {
                    if (unlinkErr) console.error('temp file cleanup failed:', unlinkErr);
                    // Was ignored: a failed upload crashed on aws_images.Location.
                    if (err) return next(err);
                    db.query('UPDATE guide_media SET image_path = $1 WHERE _id = $2  RETURNING *', [aws_images.Location, req.body.id], function(err, guide_res) {
                        if (err) return next(err);
                        return res.status(200).send({
                            status: 'success',
                            data: guide_res.rows
                        });
                    });
                });
            });
        });
    });
}
I am trying to upload video files to AWS, but when the size of a file is greater than 5 MB it is uploaded in parts (2 parts for 5 MB and 3 parts for 10 MB). But in this https://aws.amazon.com/s3/faqs/#How_much_data_can_I_store they say we can upload up to 5 GB in a single upload operation. Am I going wrong anywhere?
M gowda
  • 203
  • 6
  • 14
  • For stability and speed, AWS by default uses a multipart upload; if you don't specify the max-uploads, by default it takes 5 MB chunks. If you want to upload in one go, you need to set the max-uploads to 1, I believe. – jarnohenneman Dec 04 '17 at 10:00
  • Thank you for your response. Do I need to specify that in the `params` variable? – M gowda Dec 04 '17 at 10:04

1 Answer

4

Uploading in parts doesn't mean your file will be stored on S3 in multiple parts. If the payload is large enough, the SDK uploads the file in multiple parts concurrently so that uploads can be retried in parts if one of them fails. By default, the SDK uses a partSize of 5MB. You can set the partSize to 10 MB using the following code.

// What to upload and where: target bucket, object key, and the data stream.
var params = {Bucket: 'bucket', Key: 'key', Body: stream};
// partSize: bytes per multipart chunk (here 10 MB; S3 minimum is 5 MB).
// queueSize: how many parts are uploaded concurrently (1 = sequential).
var options = {partSize: 10 * 1024 * 1024, queueSize: 1};
s3.upload(params, options, function(err, data) {
  console.log(err, data);
});

EDIT: Link to the documentation for the upload method. http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property

Siddharth Nayar
  • 836
  • 7
  • 6