Upload a stream to s3 - node.js

I read Pipe a stream to s3.upload(), but I'm not sure it actually solves my problem, and what I tried didn't work.
What I am doing is a GET call to www.example.com. This returns a stream, and I want to upload that stream to S3.
Here's my attempt:
fetch('https://www.example.com', {
  method: 'GET',
  headers: {
    'Authorization': "Bearer " + myAccessToken,
  },
})
  .then(function(response) {
    return response.text();
  })
  .then(function(data) {
    uploadToS3(data);
  });
const uploadToS3 = (data) => {
  // Setting up S3 upload parameters
  const params = {
    Bucket: myBucket,
    Key: "fileName",
    Body: data
  };
  // Uploading files to the bucket
  s3.upload(params, function(err, data) {
    if (err) {
      throw err;
    }
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
Output: File uploaded successfully. https://exampleBucket.s3.amazonaws.com/fileName.pdf
However, the uploaded file is blank.

I figured it out, but I did not keep using fetch. Instead, I download the file with axios, upload it to S3, and then delete the local file.
function getNewFilesFromExampleDotCom(myAccessToken, fileName, fileKey) {
  let url2 = 'https://example.com' + fileKey;
  axios
    .get(url2, {
      headers: { 'Authorization': "Bearer " + myAccessToken },
      responseType: 'stream',
    })
    .then(response => {
      let file = fileName;
      // Wait until the download has finished writing to disk
      // before processing the file
      let writeStream = fs.createWriteStream(file);
      response.data.pipe(writeStream);
      writeStream.on('finish', () => {
        let myFileInfo = [file];
        processArray(myFileInfo);
        console.log(file + " saved");
      });
    })
    .catch(error => console.log(error));
}
async function processArray(array) {
  for (const item of array) {
    await delayedLog(item);
  }
  console.log('Downloaded!');
  console.log('Uploading to s3!');
}

function delay() {
  return new Promise(resolve => setTimeout(resolve, 300));
}

async function delayedLog(item) {
  await delay();
  await uploadFiles(item);
}
async function uploadFiles(file) {
  // Wait for the upload to complete before deleting the local copy
  await uploadToS3List(file);
  deleteMyFiles(file);
}

const uploadToS3List = (fileName) => {
  // Read content from the file
  const fileContent = fs.readFileSync(fileName);
  // Setting up S3 upload parameters
  const params = {
    Bucket: "myBucketName",
    Key: fileName,
    Body: fileContent
  };
  // Uploading files to the bucket; .promise() lets the caller await completion
  return s3.upload(params).promise().then(data => {
    console.log(`File uploaded successfully. ${data.Location}`);
  });
};
function deleteMyFiles(path) {
  fs.unlink(path, (err) => {
    if (err) {
      console.error(err);
      return;
    }
    console.log(path + " has been deleted");
  });
}
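For reference, the disk round-trip can be avoided entirely, since s3.upload() accepts a readable stream as Body. A minimal sketch, assuming node-fetch v2 (where response.body is a Node readable stream) and the AWS SDK v2 client from the question:

const fetch = require('node-fetch'); // v2: response.body is a Node stream
const AWS = require('aws-sdk');
const s3 = new AWS.S3();

async function streamToS3(url, accessToken, bucket, key) {
  const response = await fetch(url, {
    headers: { 'Authorization': 'Bearer ' + accessToken },
  });
  if (!response.ok) throw new Error('GET failed: ' + response.statusText);
  // s3.upload() consumes the stream directly; no temp file needed
  const result = await s3
    .upload({ Bucket: bucket, Key: key, Body: response.body })
    .promise();
  console.log(`File uploaded successfully. ${result.Location}`);
  return result;
}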

Fetch multiple files and write to AWS S3 with nodejs Lambda function

I have an array of image urls that I get from an SQS message. I need to download the images and store them in an S3 bucket. If downloading or storing an image fails, I need to catch the error, so I can push the image to another SQS queue for retrying later.
What I have so far does download and store the images, but I don't know how to access the results of the fetch and putObject functions. Also I'm not sure if I'm going about this the right way or if there's a more efficient/better/elegant way to do this.
This is what I have now:
const AWS = require("aws-sdk");
const fetch = require("node-fetch");
const s3 = new AWS.S3();

exports.handler = function(event, context) {
  // SQS may invoke with multiple messages
  for (const message of event.Records) {
    const bodyData = JSON.parse(message.body);
    const bucket = 'my_images_bucket';
    const images = bodyData.images;

    let urls = [];
    for (const image of images) {
      urls.push(image);
    }

    let promises = urls.map(image => {
      fetch(image)
        .then((response) => {
          if (!response.ok) {
            throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
          }
          return response;
        })
        .then(async res => {
          try {
            const buffer = await res.buffer();
            console.log(image);
            // store
            return s3.putObject(
              {
                Bucket: bucket,
                Key: image,
                Body: buffer,
                ContentType: "image/jpeg"
              }
            ).promise();
          } catch (e) {
            console.log('An error occurred while storing image ' + image + ': ' + e);
          }
        })
        .catch((error) => {
          console.error(error);
        });
    });

    Promise.all(promises)
      .then(d => {
        console.log('All images downloaded.');
        console.log('PromiseAll result: ' + d);
      }).catch(e => {
        console.log('Whoops something went wrong!', e);
      });
  }
}
The output I get from this:
INFO All images downloaded.
INFO PromiseAll result: ,,,,
INFO https://myserver/10658272812/image14.jpg
INFO https://myserver/10658272810/image12.jpg
INFO https://myserver/10658272804/image6.jpg
INFO https://myserver/10658272813/image15.jpg
INFO https://myserver/10658272816/image18.jpg
I'm attaching the code that I wrote for a similar problem.
const s3Put = (filename, data, mime, s3Params = {}) => {
  return new Promise((resolve, reject) => {
    s3.putObject({
      Bucket: bucket,
      Key: filename,
      Body: data,
      ContentType: mime,
      ...s3Params
    }, (err, data) => {
      if (err) {
        return reject(err);
      }
      return resolve(data);
    });
  });
};
let filePromise = s3Put(to, content, fileMime, s3Params)
  .then(() => console.log("MyCode"))
  .catch(err => {
    const error = {
      message: `S3: ${(err.pri && err.pri.message) || (err.internal && err.internal.message)}`,
      to
    };
    errors.push(error);
    return onError(error);
  });
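As a side note, the reason the question's output shows PromiseAll result: ,,,, is that the map() callback never returns its promise chain, so promises is an array of undefined values and Promise.all() resolves immediately. A minimal sketch of the corrected loop, assuming node-fetch and AWS SDK v2 as in the question:

const promises = urls.map(image =>
  fetch(image)
    .then((response) => {
      if (!response.ok) {
        throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
      }
      return response.buffer();
    })
    .then((buffer) =>
      // returning the putObject promise makes its result visible to Promise.all
      s3.putObject({
        Bucket: bucket,
        Key: image,
        Body: buffer,
        ContentType: 'image/jpeg'
      }).promise()
    )
);

Promise.all(promises)
  .then(results => console.log('All images stored:', results))
  .catch(e => console.log('At least one image failed:', e));

With the promises returned, a single failed image rejects the whole Promise.all(), which is the hook for pushing that message to the retry queue.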

busboy on field and on file not firing

It worked yesterday, and now it stopped without any changes made to the code. What is going on?
Client
async function uploadFile(file) {
  let formData = new FormData();
  formData.append("recordUid", recordUid);
  formData.append("fieldUid", fieldUid);
  formData.append("file", file);
  await fetchPostFormData("/api/files", formData);
}
async function fetchPostFormData(url, formData) {
  try {
    let result = await (
      await fetch(url, {
        method: "POST",
        withCredentials: true,
        credentials: "include",
        headers: {
          Authorization: localStorage.getItem("token"),
        },
        body: formData,
      })
    ).json();
    return result;
  } catch (err) {
    return err;
  }
}
Server
router.post("/api/files", async (req, res, next) => {
  try {
    console.log("starting upload..."); // <------------------- THIS ONE IS LOGGED
    let bb = busboy({
      headers: req.headers,
      limits: {
        fileSize: 20 * 1024 * 1024, // 20 MB
      },
    });
    let fields = {};
    // Get any text values
    bb.on("field", (fieldname, val, fieldnameTruncated, valTruncated) => {
      console.log("on.field", fieldname, val); // <------------------ NOT FIRING
      fields[fieldname] = val;
    });
    bb.on("file", (fieldname, file, filename, encoding, mimetype) => {
      console.log("on.file"); // <----------------------------------- NOT FIRING
      let parts = filename.filename.split(".");
      let name = parts[0];
      let extension = parts[parts.length - 1];
      let finalName = `${+new Date()}-${name}.${extension}`;
      let filePath = `${filesFolderPath}${finalName}`;
      // Open writeable stream to path
      let writeStream = fs.createWriteStream(filePath);
      // Pipe the file to the opened stream
      file.pipe(writeStream);
      // Check for errors
      writeStream.on("error", (err) => {
        console.log(err);
      });
      writeStream.on("close", async (err) => {
        let sizeBytes = fs.statSync(filePath).size;
      });
    });
    bb.on("finish", () => {
      res.status(200).send({ success: true });
    });
  } catch (err) {
    next(err);
  }
});
Managed to solve it.
The problem was the missing req.pipe(bb) at the very end: busboy is a writable stream, and none of its events fire until the request is actually piped into it.
// previous code... ^^^^^
    bb.on("finish", () => {
      res.status(200).send({ success: true });
    });
    req.pipe(bb); // <------------- THIS LINE RIGHT HERE
  } catch (err) {
    next(err);
  }
});
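For reference, a minimal working sketch of the route, assuming busboy v1.x (where the file event passes an info object) and an Express router as in the question:

const busboy = require('busboy');
const fs = require('fs');

router.post('/api/files', (req, res, next) => {
  const bb = busboy({ headers: req.headers });
  bb.on('field', (name, val) => console.log('field:', name, val));
  bb.on('file', (name, file, info) => {
    // info.filename comes from the part's Content-Disposition header
    file.pipe(fs.createWriteStream(`/tmp/${info.filename}`));
  });
  bb.on('close', () => res.status(200).send({ success: true }));
  bb.on('error', next);
  req.pipe(bb); // busboy sees no data, and fires no events, without this
});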

Node.js upload Image Stream.Readable to S3

My lambda is triggered by a request from the browser. The browser sends an image as multipart/form-data.
The lambda uses busboy to parse the request:
function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 10 },
    });
    const imageResponse = new Map<string, IImageParseResponse>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      imageResponse.set(id, { file, filename, mimeType });
    });
    busboy.on("error", (error) => reject(`Parse error: ${error}`));
    busboy.on("finish", () => resolve(imageResponse));
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
Once the request is parsed, I want to upload the file to AWS S3.
export async function handler(event: IHttpEvent) {
  var res = await parseForm(event);
  const s3 = new S3Client({ region: "eu-central-1" });
  for (const [k, v] of res) {
    console.log(`File ${v.filename} ${v.mimeType} streaming`);
    const stream = new Readable().wrap(v.file);
    const upload = new Upload({
      client: s3,
      params: {
        Key: v.filename,
        Bucket: "my-image-bucket",
        Body: stream,
        ContentType: v.mimeType,
      },
    });
    upload.on("httpUploadProgress", (p) => console.log(p));
    const result = await upload.done();
    console.log(result);
    return result;
  }
}
This does not work; the browser receives a 200 OK with a null body response. What confuses me even more is that console.log(result); does not log anything to the console.
Where is my mistake? I don't fully understand the mechanics of streams, but as far as I understand, streaming should be more memory-efficient. In the future I plan to upload multiple images at once, and in order to save cost I want my method to be as efficient as possible.
In general, I made two mistakes:
1. I tried to upload the stream when it had already been read to the end by busboy.
2. I did not properly wait for the completion of the upload to S3 before terminating the function.
In the end I ended up with the following:
const s3 = new S3Client({ region: "eu-central-1" });
const { BUCKET_NAME, MAX_IMAGE_SIZE } = process.env;

export async function handler(event: IHttpEvent) {
  const results = await parseForm(event);
  const response = [];
  for (const r of results) {
    if (r.status === "fulfilled") {
      const value: any = r.value.result;
      response.push({
        id: r.value.id,
        key: value.Key,
        url: value.Location,
      });
    }
    if (r.status === "rejected")
      response.push({ id: r.reason.id, reason: r.reason.error });
  }
  return response;
}

async function doneHandler(
  id: string,
  uploadMap: Map<string, Upload>
): Promise<{ id: string; result: ServiceOutputTypes }> {
  try {
    var result = await uploadMap.get(id).done();
  } catch (e: any) {
    var error = e;
  } finally {
    uploadMap.delete(id);
    if (error) throw { id, error };
    return { id, result };
  }
}

function parseForm(event: IHttpEvent) {
  return new Promise((resolve, reject) => {
    const busboy = new Busboy({
      headers: event.headers,
      limits: { files: 1, fileSize: parseInt(MAX_IMAGE_SIZE) },
    });
    const responses: Promise<{
      id: string;
      result: ServiceOutputTypes;
    }>[] = [];
    const uploads = new Map<string, Upload>();
    busboy.on("file", (id, file, filename, encoding, mimeType) => {
      uploads.set(
        id,
        new Upload({
          client: s3,
          params: {
            Bucket: BUCKET_NAME,
            Body: new Readable().wrap(file),
            Key: filename,
            ContentType: mimeType,
            ContentEncoding: encoding,
          },
        })
      );
      responses.push(doneHandler(id, uploads));
      file.on("limit", async () => {
        const aborts = [];
        for (const [k, upload] of uploads) {
          aborts.push(upload.abort());
        }
        await Promise.all(aborts);
        return reject(new Error("File is too big."));
      });
    });
    busboy.on("error", (error: any) => {
      reject(new Error(`Parse error: ${error}`));
    });
    busboy.on("finish", async () => {
      const res = await Promise.allSettled(responses);
      resolve(res);
    });
    busboy.write(event.body, event.isBase64Encoded ? "base64" : "binary");
    busboy.end();
  });
}
This solution also handles file limits and tries to abort all pending uploads to S3.

Download multiple images from URL and upload to aws s3 from aws lambda (nodejs)

I can use this code to upload a single image to S3. When I try multiple images, only the first item is uploaded and the Lambda stops executing. I'm new to JavaScript, so I don't understand the problem here.
async function downloadImage(url) {
  var options = {
    uri: url,
    encoding: null
  };
  await new Promise((resolve, reject) => {
    request(options, function(error, response, body) {
      if (error || response.statusCode !== 200) {
        console.log("failed to get image");
        console.log(error);
      } else {
        s3.putObject({
          Body: body,
          Key: 'template/' + url.split('/').pop(),
          Bucket: bucketName
        }, function(error, data) {
          if (error) {
            console.log("error downloading image to s3");
          } else {
            console.log("success uploading to s3");
          }
        });
      }
    });
  })
  .catch((error) => {
    console.log("error");
  });
}
exports.handler = async (event, _ctx, _cb) => {
  var images = {
    banner: "http://media.com/strip.png",
    icon: "http://media.com/icon.png",
    logo: "http://media.com/logo.png"
  };
  for (const [key, value] of Object.entries(images)) {
    console.log(`${key}: ${value}`);
    await downloadImage(value);
  }
}
The output I get from lambda for this:
2020-10-03T19:11:32.293Z 3cd401a6-08c6-49a2-b01c-99d6430ffc1a INFO banner: http://media.com/circle/strip.png
2020-10-03T19:11:33.201Z 3cd401a6-08c6-49a2-b01c-99d6430ffc1a INFO success uploading to s3
The promise inside downloadImage never calls resolve or reject, so the first await never settles and the Lambda freezes until it times out. Promisify both steps and await them:
const downloadImage = async url => {
  const options = {
    uri: url,
    encoding: null
  };
  const image = await requestPromise(options);
  await uploadToS3(image, url);
  return 'Uploaded';
}

const requestPromise = options => {
  return new Promise((resolve, reject) => {
    request(options, (error, response, body) => {
      if (error || response.statusCode !== 200) {
        console.log("failed to get image: ", error);
        return reject(error);
      }
      resolve(body);
    });
  });
}
const uploadToS3 = (body, url) => {
  return new Promise((resolve, reject) => {
    s3.putObject({
      Body: body,
      Key: 'template/' + url.split('/').pop(),
      Bucket: bucketName
    }, (err, data) => {
      if (err) {
        console.log("error uploading image to s3", err);
        return reject(err);
      }
      resolve(data);
    });
  });
}
exports.handler = async (event, _ctx, _cb) => {
  try {
    const images = {
      banner: "http://media.com/strip.png",
      icon: "http://media.com/icon.png",
      logo: "http://media.com/logo.png"
    };
    for (const [key, value] of Object.entries(images)) {
      console.log(`${key}: ${value}`);
      await downloadImage(value);
    }
  } catch (error) {
    console.log('Error in handler: ', error);
    return error;
  }
}
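Since the corrected downloadImage returns a promise, the images can also be uploaded in parallel rather than one at a time. A sketch under the same assumptions as the answer above:

exports.handler = async (event) => {
  try {
    const images = {
      banner: "http://media.com/strip.png",
      icon: "http://media.com/icon.png",
      logo: "http://media.com/logo.png"
    };
    // Start every download/upload at once and wait for all of them to finish
    await Promise.all(Object.values(images).map(url => downloadImage(url)));
    return 'All uploaded';
  } catch (error) {
    console.log('Error in handler: ', error);
    return error;
  }
};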

Axios get a file from URL and upload to s3

I'm trying to get files from a site using axios.get and then upload them directly to S3. However, the files are corrupted or not encoded properly and can't be opened after upload. File types range from .jpg and .png to .pdf. Here is my code:
axios.get(URL, {
  responseEncoding: 'binary',
  responseType: 'document',
}).then((response) => {
  return new Promise((resolve, reject) => {
    const s3Bucket = nconf.get('AWS_S3_BUCKET');
    s3.upload({
      'ACL': 'public-read',
      'Body': response.data,
      'Bucket': s3Bucket,
      'Key': `static/${filePath}/${fileManaged.get('filename')}`,
    }, function(err) {
      if (err) {
        return reject(err);
      }
    });
  });
});
I've tried modifying responseType to arraybuffer and creating a buffer using Buffer.from(response.data, 'binary').toString('base64'), to no avail. What am I missing?
I was able to get it working by using an arraybuffer and the .putObject function instead of .upload
axios.get(encodeURI(url), {
  responseType: 'arraybuffer',
}).then((response) => {
  s3.putObject({
    'ACL': 'public-read',
    'Body': response.data,
    'Bucket': s3Bucket,
    'Key': `static/${filePath}/${fileManaged.get('filename')}`,
  }, function(err) {
    // handle the error as before
  });
});
Axios encodes the response body in UTF-8. You should use another library, like request.
The response from John Xu is correct, but in my case I had to add Buffer.from(image.data, 'utf8') as stated above in order to get the correct buffer, similar to a request response. Here is my code:
const AWS = require('aws-sdk');
const axios = require('axios');

/**
 * uploadFile saves a file into s3
 * @param {*} fullname absolute path and file name of the file to be uploaded
 * @param {*} filecontent buffer of the image file
 */
var uploadFile = async function (s3_creds, fullname, filecontent, filetype) {
  const s3 = new AWS.S3(s3_creds);
  return new Promise((resolve, reject) => {
    // Add a file to a Space
    var params = {
      Key: fullname, // absolute path of the file
      Body: filecontent,
      Bucket: "docserpcloud",
      ACL: "public-read", // or private
      ContentEncoding: 'binary',
      ContentType: filetype
    };
    // console.log(params)
    s3.putObject(params, function (err, data) {
      if (err) {
        console.log(err, err.stack);
        reject(err);
      } else {
        resolve(data);
        console.log(data);
      }
    });
  });
}

var getFilefromURL = async function (imageuri) {
  // console.log(imageuri)
  return new Promise((resolve, reject) => {
    try {
      axios.get(encodeURI(imageuri), {
        responseType: "arraybuffer"
      }).then((response) => {
        resolve(response);
      });
    } catch (err) {
      reject(err);
    }
  });
}

/**
 * saveFileFromUrl gets a file from a URL and saves a copy on the s3 bucket
 * @param {*} imageuri full URL to an image
 * @param {*} fullname absolute path and filename of the file to be written on s3
 */
var saveFileFromUrl = async function (s3_creds, imageuri, fullname) {
  return new Promise((resolve, reject) => {
    getFilefromURL(imageuri).then(image => {
      // console.log(image.res)
      uploadFile(s3_creds, fullname, Buffer.from(image.data, 'utf8'), image.headers['content-type']).then(s3response => {
        resolve(s3response);
      }).catch(err => {
        reject(err);
      });
    }).catch(err => {
      reject(err);
    });
  });
}

module.exports = {
  uploadFile: uploadFile,
  getFilefromURL: getFilefromURL,
  saveFileFromUrl: saveFileFromUrl
}
async function main() {
  try {
    var s3_creds = {
      "accessKeyId": "acessid",
      "endpoint": "xxxx.digitaloceanspaces.com",
      "secretAccessKey": "Vttkia0....."
    };
    await saveFileFromUrl(s3_creds, "https://gitlab.com/qtree/erpcloud_logos/-/raw/master/pdf_logo2.png?inline=true", 'media/pddd.png');
  } catch {}
}
main();
Update s3_creds to fit your credentials and run it to upload the PDF logo.
Regards,
Enrique
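For comparison, a streaming variant that avoids buffering the whole file in memory. This is a sketch, not Enrique's code: it assumes axios with responseType: 'stream' and the AWS SDK v3 Upload helper from @aws-sdk/lib-storage, which accepts a readable stream of unknown length:

const axios = require('axios');
const { S3Client } = require('@aws-sdk/client-s3');
const { Upload } = require('@aws-sdk/lib-storage');

async function saveFileFromUrlStreaming(url, bucket, key) {
  const response = await axios.get(encodeURI(url), { responseType: 'stream' });
  const upload = new Upload({
    client: new S3Client({}), // region/credentials come from the environment
    params: {
      Bucket: bucket,
      Key: key,
      Body: response.data, // axios gives a Node readable stream here
      ContentType: response.headers['content-type'],
    },
  });
  return upload.done();
}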
