You can do this with the built-in http module and streams.
const http = require("http");
/**
 * Thin wrapper around http.request() that defaults the HTTP method to GET.
 * @param {object} [options] - Standard http.request() options; any key given
 *   here (e.g. method, hostname, port, path) overrides the default.
 * @returns {http.ClientRequest} The request object (not yet ended).
 */
const request = (options = {}) => http.request({ method: "GET", ...options });
// Create the download (GET) request.
// NOTE: http.request() has no "url" options key — a "url" property is
// silently ignored and the request falls back to hostname "localhost".
// Use "hostname", "port", and "path" instead.
let download = request({
  hostname: "127.0.0.1",
  port: 80,
  path: "/bytes/100"
});
// handle get/download response
download.on("response", (res) => {
  console.log("Response from download", res.headers);
  let upload = request({
    hostname: "127.0.0.1",
    port: 80,
    path: "/anything/foo",
    method: "POST"
  });
  // handle post/upload response
  upload.on("response", (res) => {
    console.log("Response from upload", res.headers);
    let chunks = [];
    res.on("data", (chunk) => {
      chunks.push(chunk);
    });
    res.on("end", () => {
      console.log("Body", Buffer.concat(chunks).toString());
    });
  });
  // pipe download to upload — streams the body chunk by chunk,
  // so the whole payload is never buffered in memory
  res.pipe(upload);
});
// finalize the GET request so it is actually sent
download.end();
I used the httpbin container from "kennethreitz":
docker run -p 80:80 kennethreitz/httpbin
See: https://httpbin.org/
The example code above "downloads" 100 bytes and pipes them to "/anything/foo", which responds with debug information about the request that was made.
Example output:
Response from download {
server: 'gunicorn/19.9.0',
date: 'Thu, 20 Apr 2023 07:43:08 GMT',
connection: 'close',
'content-type': 'application/octet-stream',
'content-length': '100',
'access-control-allow-origin': '*',
'access-control-allow-credentials': 'true'
}
Response from upload {
server: 'gunicorn/19.9.0',
date: 'Thu, 20 Apr 2023 07:43:08 GMT',
connection: 'close',
'content-type': 'application/json',
'content-length': '456',
'access-control-allow-origin': '*',
'access-control-allow-credentials': 'true'
}
Body {
"args": {},
"data": "data:application/octet-stream;base64,KnyWTt134iwvCP8AHAx7eXfPrxpjZUuZMiqUI3y/PAemFqBmAGDZNI7IlP5oQ+pUjKYaKPXH3CjI0HeaSrGefPtztVsJh+R+BR8UaCQAzGCpyCS/fR34k26AnG4b+jK8D1A6vA==",
"files": {},
"form": {},
"headers": {
"Connection": "close",
"Host": "localhost",
"Transfer-Encoding": "chunked"
},
"json": null,
"method": "POST",
"origin": "192.168.16.1",
"url": "http://localhost/anything/foo"
}
In the "upload response" you can see that the uploaded data is encoded as base64 for debugging purposes, but it's the exact same data you received from "/bytes/100".
Example code for comparing the download buffer with the upload buffer:
// Example: verify that the uploaded bytes equal the downloaded bytes.
download.on("response", (res) => {
  console.log("Response from download", res.headers);
  let recvBuffer = Buffer.alloc(0);
  let sendBuffer = Buffer.alloc(0);
  // NOTE: http.request() has no "url" options key — a "url" property is
  // silently ignored. Use "hostname", "port", and "path" instead.
  let upload = request({
    hostname: "127.0.0.1",
    port: 80,
    path: "/anything/foo",
    method: "POST"
  });
  // collect the downloaded bytes for the later comparison
  let chunks = [];
  res.on("data", (chunk) => {
    chunks.push(chunk);
  });
  res.on("end", () => {
    recvBuffer = Buffer.concat(chunks);
  });
  // handle post/upload response
  upload.on("response", (res) => {
    console.log("Response from upload", res.headers);
    let chunks = [];
    res.on("data", (chunk) => {
      chunks.push(chunk);
    });
    res.on("end", () => {
      // parse response as JSON & extract the echoed request body;
      // json.data looks like "data:application/octet-stream;base64,<payload>"
      let json = JSON.parse(Buffer.concat(chunks).toString());
      sendBuffer = Buffer.from(json.data.split(",")[1], "base64");
      console.log("Download = Upload:", Buffer.compare(recvBuffer, sendBuffer) === 0);
    });
  });
  // pipe download to upload
  res.pipe(upload);
});
Since it uses streams, the memory footprint is pretty low.
Note that the exact solution depends on the targets that provide the download/upload endpoints, but since you wanted a boilerplate, this should be a good starting point.