/**
 * Attaches an uploaded file to an existing guide_media row.
 *
 * Flow: validate input -> look up the row -> rename the temp file to carry its
 * original extension -> upload it to S3 under 'upload-v2/' -> delete the local
 * temp file -> store the resulting S3 URL in guide_media.image_path.
 *
 * Responds 400 when the file or id is missing, 404 when the row does not
 * exist, 200 with the updated row(s) on success; other errors go to next().
 *
 * @param {Object} req - Express request; expects req.file (multer-style upload)
 *                       and req.body.id (guide_media._id).
 * @param {Object} res - Express response.
 * @param {Function} next - Express error-forwarding callback.
 */
function uploadGuideMedia(req, res, next) {
  // Both the file and the row id are required. The original `&&` only
  // rejected when BOTH were missing, so a request with an id but no file
  // crashed later at req.file.path.
  if (!req.file || !req.body.id) {
    // 400 Bad Request: 401 (Unauthorized) was the wrong status for a
    // missing request parameter.
    return res.status(400).send({
      status: 400,
      data: "Upload a file with 'file' key"
    });
  }
  db.query('SELECT * from guide_media where _id = $1', [req.body.id], function(err, guide) {
    if (err) return next(err);
    if (guide.rows.length === 0) {
      return res.status(404).send({
        status: 404,
        data: "Guide_media not found"
      });
    }
    // Give the temp file its original extension so S3 / browsers can infer
    // the content type from the key.
    var name = req.file.path + path.extname(req.file.originalname);
    fs.renameSync(req.file.path, name);
    req.file.path = name;
    var s3bucket = new AWS.S3({
      params: {
        Bucket: 'mapery-v2'
      }
    });
    fs.readFile(req.file.path, function(err, lq_file) {
      // Previously ignored: a read failure would have uploaded an
      // undefined body to S3.
      if (err) return next(err);
      // NOTE(review): createBucket on every upload is redundant once the
      // bucket exists and needs s3:CreateBucket permission — consider
      // removing it; kept here to preserve existing behavior.
      s3bucket.createBucket(function() {
        var params = {
          Key: 'upload-v2/' + req.file.originalname,
          Body: lq_file,
          ContentType: req.file.mimetype,
          ACL: 'public-read'
        };
        s3bucket.upload(params, function(err, aws_images) {
          // Previously ignored: an upload failure crashed below when
          // dereferencing aws_images.Location.
          if (err) return next(err);
          // Local cleanup is best-effort; a failed unlink should not fail
          // the request, but it should not be silent either.
          fs.unlink(req.file.path, function(unlinkErr) {
            if (unlinkErr) console.error('Failed to remove temp file:', unlinkErr);
            db.query('UPDATE guide_media SET image_path = $1 WHERE _id = $2 RETURNING *', [aws_images.Location, req.body.id], function(err, guide_res) {
              if (err) return next(err);
              return res.status(200).send({
                status: 'success',
                data: guide_res.rows
              });
            });
          });
        });
      });
    });
  });
}
Asked
Active
Viewed 3,400 times
3

M gowda
- 203
- 6
- 14
-
For stability and speed, AWS by default uses a multipart upload; if you don't specify the part size, it defaults to 5 MB chunks. If you want to upload in one go, I believe you need to set the queue/part options accordingly. – jarnohenneman Dec 04 '17 at 10:00
-
Thank you for your response. Do I need to specify that in the `params` variable? – M gowda Dec 04 '17 at 10:04
1 Answer
4
Uploading in parts doesn't mean your file will be stored on S3 in multiple parts. If the payload is large enough, the SDK uploads the file in multiple parts concurrently so that uploads can be retried in parts if one of them fails. By default, the SDK uses a partSize of 5MB. You can set the partSize to 10 MB using the following code.
var params = {Bucket: 'bucket', Key: 'key', Body: stream};
var options = {partSize: 10 * 1024 * 1024, queueSize: 1};
s3.upload(params, options, function(err, data) {
console.log(err, data);
});
EDIT: Link to the documentation for the upload method. http://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property

Siddharth Nayar
- 836
- 7
- 6
-
1 — I have changed the partSize to 1 GB and am trying to upload a 30 MB file, but it is not working. – M gowda Dec 04 '17 at 11:37
-
-
1 — @SiddharthNayar the upper size limit for each individual part is 5 GB. – Michael - sqlbot Dec 04 '17 at 23:23
-
1 — @Mgowda you need to be more specific than saying "it's not working." – Michael - sqlbot Dec 04 '17 at 23:24