I'm trying to take a set of files stored in one bucket and copy them to another (one bucket is transient and the other is temporary; the files need to be kept for a longer period of time). I'm using the Node.js Forge SDK: getObject to download each file, then uploadObject to put it into the second bucket. This works fine for most of the files I'm copying, but fails when I try it on a JSON file.
I've tried converting the JSON both to a buffer and to a string before the uploadObject call; both attempts fail with a gateway timeout error.
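For reference, the buffer variant of the conversion looked roughly like this (just a sketch of the relevant lines; body and contentLength come from the getObject response, as in the full function below):

    // Buffer attempt, done just before uploadObject (sketch only)
    if (objectName.includes('json')) {
        // re-encode the parsed JSON response as a Buffer
        body = Buffer.from(JSON.stringify(body));
        // note: contentLength was still the 'content-length' header from getObject
    }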
The full copy function is below. Any suggestions on how to make this work? Thanks!
static copyObjectToBucket(oAuth2TwoLegged, bucketName, copyToBucketName, objectName, access, callback) {
    console.log(bucketName, copyToBucketName, objectName);

    // ObjectsApi is a ForgeSDK.ObjectsApi instance created elsewhere
    ObjectsApi.getObject(bucketName, objectName, {}, oAuth2TwoLegged, oAuth2TwoLegged.getCredentials())
        .then((resp) => {
            var headers = resp.headers;
            var contentLength = headers['content-length'];
            var body = resp.body;

            // For JSON files the body comes back parsed, so convert it back to a string
            if (objectName.includes('json')) {
                body = JSON.stringify(body[0]);
            }

            // Upload the downloaded content into the destination bucket
            ObjectsApi.uploadObject(copyToBucketName, objectName, contentLength, body, {}, oAuth2TwoLegged, oAuth2TwoLegged.getCredentials())
                .then((res) => {
                    console.log(res);
                    callback(res);
                })
                .catch((err) => {
                    console.log(err);
                });
        })
        .catch((err) => {
            // getObject failed
            console.log(err);
        });
}