I am using DigitalOcean Spaces with Node.js to upload large files in parts. I'm uploading a file from a ~400 MB stream using the following function:
const { S3 } = require('@aws-sdk/client-s3')
const { Upload } = require('@aws-sdk/lib-storage')
const stream = require('stream')

module.exports.uploadFileFromStream = async (fileName, readableStream) => {
  // Pipe the source through a PassThrough before handing it to lib-storage
  const pass = new stream.PassThrough()

  const s3Client = new S3({
    endpoint: process.env.s3_endpoint,
    region: process.env.s3_region,
    credentials: {
      accessKeyId: process.env.s3_akey,
      secretAccessKey: process.env.s3_sakey,
    },
  })

  try {
    const u = new Upload({
      client: s3Client,
      params: {
        Bucket: process.env.s3_bucket,
        Key: fileName,
        Body: pass,
        ACL: 'private',
      },
      queueSize: 4,              // up to 4 parts in flight at once
      partSize: 1024 * 1024 * 5, // 5 MB per part
      leavePartsOnError: false,  // clean up uploaded parts if the upload fails
    })

    readableStream.pipe(pass)
    await u.done()
  } catch (err) {
    console.log(err)
  }
}
After uploading about 38-39 parts (~200 MB), the upload hangs. I've tried changing the part size, and it still hangs at roughly the same point (~200 MB, give or take). The hang usually occurs about 30 seconds in.
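For reference, this is roughly how I count the uploaded parts. The httpUploadProgress event and its fields come from @aws-sdk/lib-storage; the logging itself is just a sketch, attached inside the function above before calling u.done():

// Sketch: progress logging to see which part the upload stalls on.
// progress.total can be undefined when the stream length is unknown.
u.on('httpUploadProgress', (progress) => {
  console.log(`part ${progress.part}: ${progress.loaded} / ${progress.total ?? '?'} bytes`)
})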
Using another upload method, I observed that the connection may be getting reset. What could be the reason, and how can I fix it?
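For what it's worth, I also plan to attach error listeners inside the function, in case a stream error such as ECONNRESET is being swallowed rather than rejected; a minimal sketch (the handlers are illustrative):

// Sketch: surface stream-level failures, since .pipe() does not
// forward errors from the source stream to the destination.
readableStream.on('error', (err) => console.error('source stream error:', err))
pass.on('error', (err) => console.error('passthrough error:', err))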