43

I'm writing an application which downloads images from a URL and then uploads them to an S3 bucket using the aws-sdk.

Previously I was just downloading images and saving them to disk like this:

request.head(url, function(err, res, body){

    request(url).pipe(fs.createWriteStream(image_path));

});

And then uploading the images to AWS S3 like this:

fs.readFile(image_path, function(err, data){
    s3.client.putObject({
        Bucket: 'myBucket',
        Key: image_path,
        Body: data,
        ACL: 'public-read'
    }, function(err, resp) {
        if(err){
            console.log("error in s3 put object cb");
        } else { 
            console.log(resp);
            console.log("successfully added image to s3");
        }
    });
});

But I would like to skip the part where I save the image to disk. Is there some way I can pipe the response from request(url) to a variable and then upload that?

Loourr

6 Answers

42

Here's some JavaScript that does this nicely:

    var options = {
        uri: uri,
        encoding: null   // null makes request return the body as a Buffer
    };
    request(options, function(error, response, body) {
        if (error || response.statusCode !== 200) {
            console.log("failed to get image");
            console.log(error);
        } else {
            s3.putObject({
                Body: body,
                Key: path,
                Bucket: 'bucket_name'
            }, function(error, data) {
                if (error) {
                    console.log("error uploading image to s3");
                } else {
                    console.log("success uploading to s3");
                }
            });
        }
    });
Loourr
    As written, the code loads the entire body into memory at once (as a string into the "body" variable). That is, this does not stream directly from request to S3. OTOH, request will create a Buffer object for "body" if "encoding" is null; see https://github.com/request/request#requestoptions-callback. I suggested an edit to this answer to change `encoding:'binary'` to `encoding:null` and eliminate `body=new Buffer(body,'binary')`. That would remove the need to store the entire "body" in memory, and I think that's in keeping with the original question and answer. But reviews wanted a comment ... – Armadillo Jim Nov 10 '15 at 04:30
  • I tried your approach, both with implicit and explicit encoding, and I find that my uploaded png files are corrupted for some reason; I can't figure out why. Trying to copy this image https://openclipart.org/image/250px/svg_to_png/264091/MirrorCarp.png and this is what I get in my bucket http://images.quickhunts.com/clipart/23234234234.png – Ilan lewin Jan 06 '17 at 20:03
  • @Ilanlewin It definitely works with `png` images, but make sure you're implementing the `fs.readFile` correctly. It may have changed since I originally wrote this answer; you may need to be more specific with the encoding. Also possibly try some `jpg`s or other generic images. – Loourr Jan 13 '17 at 21:32
  • I was trying to store a PDF from a remote URL to S3, but the PDF was corrupted after uploading. @ArmadilloJim's fix with `encoding: null` seems to be working for me. – megapixel23 Oct 18 '17 at 10:36
  • Can we do the same in iOS from the app side? – CodeCracker Aug 05 '19 at 11:37
  • I don't see why not. You'll likely need to understand objective C streaming behavior though. – Loourr Aug 05 '19 at 15:16
  • How do I get the public URL in the S3 callback? – s4suryapal Aug 23 '20 at 12:22
  • I want to apply this solution to my app, but the request module is deprecated. I want to migrate the code using axios; is there anyone who could help me? – elpmid Sep 03 '20 at 10:00
  • I cannot get this working. Is anyone able to help me out with a similar issue here https://stackoverflow.com/questions/73196077/using-aws-download-presigned-url-in-node-to-transfer-file-to-aws-bucket – Craig Howell Aug 02 '22 at 03:17
16

This is what I did, and it works nicely:

const request = require('request-promise')
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

const options = {
    uri: uri,
    encoding: null   // return the body as a Buffer
};

async function load() {
  const body = await request(options)

  const uploadResult = await s3.upload({
    Bucket: 'bucket_name',
    Key   : path,
    Body  : body,
  }).promise()

  return uploadResult
}
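
A hypothetical call site for the function above (the log messages are my own placeholders, and `uri`/`path` are assumed to be defined elsewhere):

    // Hypothetical usage of load(); Location is part of the result returned by s3.upload
    load()
      .then(result => console.log('uploaded to', result.Location))
      .catch(err => console.error('upload failed', err))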
Sietze Keuning
5

What about something like this:

const stream = require('stream');
const request = require('request');
const AWS = require('aws-sdk');

const s3 = new AWS.S3();

const pass = new stream.PassThrough();

// Pipe the HTTP response into the pass-through stream...
request(url).pipe(pass);

// ...and hand that stream to S3 as the upload body.
s3.upload({
    Bucket: 'bucket_name',
    Key: path,
    Body: pass,
});
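
One caveat not covered in the answer: as written, the upload is kicked off but never awaited. With aws-sdk v2, `s3.upload` returns a managed upload whose `.promise()` you can await to learn when the streamed upload finishes; a minimal sketch using the same placeholder bucket, key, and stream:

    // Sketch only: wait for the streamed upload to complete (aws-sdk v2)
    s3.upload({
        Bucket: 'bucket_name',
        Key: path,
        Body: pass,
    }).promise()
      .then(data => console.log('uploaded to', data.Location))
      .catch(err => console.error('upload failed', err));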

user108828
3

import axios from "axios";
import aws from 'aws-sdk'
import crypto from 'crypto'

const s3 = new aws.S3();

export const urlToS3 = async ({ url, bucket = "rememoio-users", key = Date.now() + crypto.randomBytes(8).toString('hex') + ".png" }) => {
  try {
    // Fetch the image as a stream rather than buffering it all in memory
    const { data } = await axios.get(url, { responseType: "stream" });

    const upload = await s3.upload({
      Bucket: bucket,
      ACL: 'public-read',
      Key: key,
      Body: data,
    }).promise();

    // Location is the public URL of the uploaded object
    return upload.Location;
  } catch (error) {
    console.error(error);
    throw error;
  }
};
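
A hypothetical call site (the URL and bucket here are placeholders, not from the answer):

    // Hypothetical usage of the urlToS3 helper above
    const location = await urlToS3({
      url: "https://example.com/photo.png",   // placeholder image URL
      bucket: "my-bucket",                    // placeholder bucket
    });
    console.log(location); // public URL of the uploaded object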
Daniel
2

You can implement this with Axios as shown below.

const axios = require("axios");
const AWS = require("aws-sdk");
const { PassThrough } = require("stream");

const s3 = new AWS.S3({
  accessKeyId: "accessKeyId",
  secretAccessKey: "accessKey",
  region: "region",
});

const url = "url";
const bucket = "BucketName";
const key = "key";

const uploadToS3 = async (url, bucket, key) => {
  try {
    // Request the remote file as a stream
    const stream = await axios.get(url, { responseType: "stream" });

    const passThrough = new PassThrough();

    // Start the S3 upload, reading its body from the pass-through stream
    const response = s3.upload({ Bucket: bucket, Key: key, Body: passThrough }).promise();

    // Pipe the downloaded data into the stream S3 is consuming
    stream.data.pipe(passThrough);

    return response.then((data) => data.Location).catch((e) => console.error(e));
  } catch (error) {
    console.error(error);
  }
};

uploadToS3(url, bucket, key);
bkmalan
2

Using fetch:

// fetch the image from the url
const imageResp = await fetch(
    '<image url>'
)
// read the body as an ArrayBuffer and wrap it in a Buffer
const imageBinaryBuffer = Buffer.from(await imageResp.arrayBuffer())
// get the image type (imageName is assumed to be defined by the caller)
const imageType = imageName.toLowerCase().includes(".png")
    ? "image/png"
    : "image/jpeg";

// get the presigned url and data [this can be different on your end]
const presignedResponse = await getPresignedURL(imageBinaryBuffer, imageName, imageType)

const s3Result = presignedResponse.data
// build the formData
let formData = new FormData()
Object.keys(s3Result.fields).forEach(key => {
    formData.append(key, s3Result.fields[key]);
});
formData.append("file", imageBinaryBuffer);

// POST the form to the presigned S3 URL
const s3resp = await fetch(s3Result.url, {
    method: "POST",
    body: formData,
});

return s3resp.headers.get("location")
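
The answer assumes a `getPresignedURL` helper that resolves to `{ data: { url, fields } }`. A rough sketch of what a server-side counterpart could look like with aws-sdk v2's `createPresignedPost` (the bucket name, expiry, and helper name are my assumptions, not part of the answer):

    const AWS = require("aws-sdk");
    const s3 = new AWS.S3();

    // Hypothetical helper producing the { url, fields } shape consumed above.
    // Bucket name and expiry are placeholders.
    function createUploadPost(key, contentType) {
      return new Promise((resolve, reject) => {
        s3.createPresignedPost(
          {
            Bucket: "my-bucket",
            Fields: { key, "Content-Type": contentType },
            Expires: 60, // POST policy valid for 60 seconds
          },
          (err, data) => (err ? reject(err) : resolve(data)) // data = { url, fields }
        );
      });
    }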