SVG files uploaded from S3 not displaying properly - node.js

Hi, my SVG files display correctly when the src points to a local folder, but when I serve them via GetObject from AWS they don't display correctly.
I already set the ContentType to image/svg+xml, but for some reason when I check the Network console the response still comes back as application/octet-stream.
When I console.log the ContentType of the object returned by GetObject in my backend, it prints image/svg+xml.
I also checked the metadata in the AWS console and it is image/svg+xml there as well.
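In case it helps with debugging, here is a minimal sketch (assuming the same s3 client and AWS_S3_BUCKETNAME environment variable used in the code below) of how the stored metadata can be double-checked programmatically with headObject; the key shown is only an example taken from the request log further down:

const AWS = require("aws-sdk");
const s3 = new AWS.S3();

s3.headObject(
  {
    Bucket: process.env.AWS_S3_BUCKETNAME,
    Key: "hero/image/2022/2/mbc-media-group.svg", // example key
  },
  (err, data) => {
    if (err) return console.error(err);
    // what S3 reports as the stored Content-Type for this object
    console.log(data.ContentType);
  }
);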
This is my upload helper function:
exports.uploadTos3withKey = (file, folder) => {
  return new Promise((resolve, reject) => {
    s3.upload(
      {
        Key: `${folder}/${file.name}`,
        Bucket: process.env.AWS_S3_BUCKETNAME,
        Body: fs.readFileSync(file.path),
        ACL: "public-read",
        ContentType: file.type,
      },
      (err, data) => {
        if (err) return reject(err);
        return resolve({
          bucket: data.Bucket,
          key: data.Key,
        });
      }
    );
  });
};
Here's my GET request handler that returns my S3 objects:
exports.display = async (req, res) => {
  const { key, bucket } = req.query;
  try {
    const data = await getObjectFromS3(key, bucket);
    if (data) {
      console.log(data.ContentType);
      res.setHeader("Content-Type", data.ContentType);
      res.send(data.Body);
    }
  } catch (e) {
    res.status(400).json({ e });
  }
};

exports.getObjectFromS3 = async (key, bucket) => {
  const params = {
    Bucket: bucket,
    Key: key,
  };
  const res = await new Promise((resolve, reject) => {
    s3.getObject(params, function (err, data) {
      err == null ? resolve(data) : reject(err);
    });
  });
  return res;
};
Here's the output of that console.log
image/svg+xml
GET /api/v1/media/display?bucket=mmg-bucket&key=hero/image/2022/2/mbc-media-group.svg 304 914.466 ms - -
Here are screenshots of the AWS console and the Network console:
[AWS console screenshot] [Network console screenshot]
Everything works fine with JPG and PNG files; the problem only occurs with SVG files.

Related

Fetch multiple files and write to AWS S3 with nodejs Lambda function

I have an array of image URLs that I get from an SQS message. I need to download the images and store them in an S3 bucket. If downloading or storing an image fails, I need to catch the error so I can push the image to another SQS queue to retry later.
What I have so far does download and store the images, but I don't know how to access the results of the fetch and putObject calls. I'm also not sure whether I'm going about this the right way, or whether there's a more efficient/elegant way to do it.
This is what I have now
const AWS = require("aws-sdk");
const fetch = require("node-fetch");
const s3 = new AWS.S3();

exports.handler = function (event, context) {
  // SQS may invoke with multiple messages
  for (const message of event.Records) {
    const bodyData = JSON.parse(message.body);
    const bucket = 'my_images_bucket';
    const images = bodyData.images;
    let urls = [];
    for (const image of images) {
      urls.push(image);
    }
    let promises = urls.map(image => {
      fetch(image)
        .then((response) => {
          if (!response.ok) {
            throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
          }
          return response;
        })
        .then(async res => {
          try {
            const buffer = await res.buffer();
            console.log(image);
            // store
            return s3.putObject(
              {
                Bucket: bucket,
                Key: image,
                Body: buffer,
                ContentType: "image/jpeg"
              }
            ).promise();
          } catch (e) {
            console.log('An error occurred while storing image ' + image + ': ' + e);
          }
        })
        .catch((error) => {
          console.error(error);
        });
    });
    Promise.all(promises)
      .then(d => {
        console.log('All images downloaded.');
        console.log('PromiseAll result: ' + d);
      }).catch(e => {
        console.log('Whoops something went wrong!', e);
      });
  }
}
The output I get from this:
INFO All images downloaded.
INFO PromiseAll result: ,,,,
INFO https://myserver/10658272812/image14.jpg
INFO https://myserver/10658272810/image12.jpg
INFO https://myserver/10658272804/image6.jpg
INFO https://myserver/10658272813/image15.jpg
INFO https://myserver/10658272816/image18.jpg
I attach the code that I wrote for a similar problem.
const s3Put = (filename, data, mime, s3Params = {}) => {
  return new Promise((resolve, reject) => {
    s3.putObject({
      Bucket: bucket,
      Key: filename,
      Body: data,
      ContentType: mime,
      ...s3Params
    }, (err, data) => {
      if (err) {
        return reject(err);
      }
      return resolve(data);
    });
  });
};
let filePromise = s3Put(to, content, fileMime, s3Params)
  .then(() => console.log("MyCode"))
  .catch(err => {
    const error = {
      message: `S3: ${(err.pri && err.pri.message) || (err.internal && err.internal.message)}`,
      to
    };
    errors.push(error);
    return onError(error);
  });
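To tie this back to the original question (accessing each image's result and catching failures for a retry queue), a wrapper like s3Put only helps if the promise chain is actually returned from the map callback. A minimal sketch, assuming the same fetch and s3 clients as above, using Promise.allSettled to inspect each outcome:

const handleImages = async (urls, bucket) => {
  const promises = urls.map(image =>
    fetch(image)
      .then(response => {
        if (!response.ok) {
          throw new Error('An error occurred while fetching ' + image + ': ' + response.statusText);
        }
        return response.buffer();
      })
      .then(buffer =>
        s3.putObject({ Bucket: bucket, Key: image, Body: buffer, ContentType: 'image/jpeg' }).promise()
      )
  );

  // allSettled reports one status per image instead of failing the whole batch
  const results = await Promise.allSettled(promises);
  results.forEach((result, i) => {
    if (result.status === 'rejected') {
      console.error('Failed:', urls[i], result.reason);
      // e.g. push urls[i] to the retry SQS queue here
    } else {
      console.log('Stored:', urls[i], result.value); // putObject response (ETag, etc.)
    }
  });
};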

Node: Wait for the Python script to write the file, then upload it to S3

I have written the following code, where I create a file with a Python script, upload it to S3, and then give the user the ability to download it.
exports.createFeature = async (req, res, next) => {
  let retourUrl = await uploadFile(req.body)
  res.status(201).json(retourUrl)
};

function uploadFile(feature) {
  return new Promise(async (resolve, reject) => {
    let options = {
      scriptPath: 'pathToDcript',
      args: [arg1, arg2, arg3]
    };
    PythonShell.run('script.py', options, function (err) {
      if (err) throw err;
      console.log('file has been created !');
      // read the file
      let contents = fs.readFileSync('pathToFile', { encoding: 'utf8', flag: 'r' });
      // convert it to a buffer
      const fileContent = Buffer.from(contents, "utf-8");
      // set up the S3 upload parameters
      let key = keyUserData + feature.userId + '/fileName'
      const params = {
        Bucket: bucket,
        Key: key, // file name you want to save as in S3
        Body: fileContent
      };
      // upload the file to the bucket
      s3.upload(params, function (err, data) {
        if (err) {
          throw err;
        }
        //console.log(`File uploaded successfully. ${data.Location}`);
      });
      // delete the file
      fs.unlinkSync('pathToFile');
      // get a URL for download
      const presignedURL = s3.getSignedUrl('getObject', {
        Bucket: bucket,
        Key: key,
        Expires: 60 * 5
      })
      resolve(presignedURL)
    })
  });
}
But I get the download URL before the file has been uploaded to S3. Any idea how I can make it wait until everything has finished?
If you want to use s3.upload with a callback, you need to change your code as shown below.
exports.createFeature = async (req, res, next) => {
  let retourUrl = await uploadFile(req.body)
  res.status(201).json(retourUrl)
};

function uploadFile(feature) {
  return new Promise((resolve, reject) => {
    let options = {
      scriptPath: 'pathToDcript',
      args: [arg1, arg2, arg3]
    };
    PythonShell.run('script.py', options, function (err) {
      if (err) throw err;
      console.log('file has been created !');
      // read the file
      let contents = fs.readFileSync('pathToFile', { encoding: 'utf8', flag: 'r' });
      // convert it to a buffer
      const fileContent = Buffer.from(contents, "utf-8");
      // set up the S3 upload parameters
      let key = keyUserData + feature.userId + '/fileName'
      const params = {
        Bucket: bucket,
        Key: key, // file name you want to save as in S3
        Body: fileContent
      };
      // upload the file to the bucket
      s3.upload(params, function (err, data) {
        if (err) {
          throw err;
        }
        // delete the file
        fs.unlinkSync('pathToFile');
        // get a URL for download
        const presignedURL = s3.getSignedUrl('getObject', {
          Bucket: bucket,
          Key: key,
          Expires: 60 * 5
        })
        //console.log(`File uploaded successfully. ${data.Location}`);
        resolve(presignedURL)
      });
    })
  });
}
The S3 upload method of the AWS SDK v2 returns a ManagedUpload; calling .promise() on it gives you a Promise that can be awaited.
For example:
await s3.upload(params).promise()
Note that in this case the callback passed for the Python script should be changed to an async function, in order to allow the await syntax. For example:
PythonShell.run('script.py', options, async function (err) { ... })
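Putting the two answers together, a minimal sketch of the upload step in promise form (this assumes it sits inside the same new Promise((resolve, reject) => ...) wrapper and uses the same placeholder names, bucket, keyUserData, and 'pathToFile', as above):

PythonShell.run('script.py', options, async function (err) {
  if (err) return reject(err);
  // read the generated file and build the key, as in the original code
  const fileContent = fs.readFileSync('pathToFile');
  const key = keyUserData + feature.userId + '/fileName';
  try {
    // wait for the upload to finish before generating the download URL
    await s3.upload({ Bucket: bucket, Key: key, Body: fileContent }).promise();
    fs.unlinkSync('pathToFile');
    const presignedURL = s3.getSignedUrl('getObject', {
      Bucket: bucket,
      Key: key,
      Expires: 60 * 5
    });
    resolve(presignedURL);
  } catch (e) {
    reject(e);
  }
});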

Axios get a file from URL and upload to s3

I'm trying to get files from a site using axios.get and then upload them directly to S3. However, the files are corrupted or not encoded properly and can't be opened after upload. File types range from .jpg and .png to .pdf. Here is my code:
axios.get(URL, {
  responseEncoding: 'binary',
  responseType: 'document',
}).then((response) => {
  return new Promise((resolve, reject) => {
    const s3Bucket = nconf.get('AWS_S3_BUCKET');
    s3.upload({
      'ACL': 'public-read',
      'Body': response.data,
      'Bucket': s3Bucket,
      'Key': `static/${filePath}/${fileManaged.get('filename')}`,
    }, function(err) {
      if (err) {
        return reject(err);
      }
    });
  });
});
I've tried modifying responseType to arraybuffer and creating a buffer using Buffer.from(response.data, 'binary').toString('base64'), to no avail. What am I missing?
I was able to get it working by using an arraybuffer and the .putObject function instead of .upload:
axios.get(encodeURI(url), {
  responseType: 'arraybuffer',
}).then((response) => {
  s3.putObject({
    'ACL': 'public-read',
    'Body': response.data,
    'Bucket': s3Bucket,
    'Key': `static/${filePath}/${fileManaged.get('filename')}`,
  }, function(err) {
    if (err) {
      console.error(err);
    }
  });
});
Axios decodes the response body as UTF-8 by default, which corrupts binary data.
You could use another library, such as request, instead.
The response from John Xu is correct, but in my case I had to add Buffer.from(image.data, 'utf8'), as stated above, in order to get a buffer similar to a request response. Here is my code:
const AWS = require('aws-sdk');
const axios = require('axios');

/**
 * uploadFile saves a file into s3
 * @param {*} s3_creds credentials/config for the S3 client
 * @param {*} fullname absolute path and file name of the file to be uploaded
 * @param {*} filecontent buffer of the image file
 * @param {*} filetype content type of the file
 */
var uploadFile = async function (s3_creds, fullname, filecontent, filetype) {
  const s3 = new AWS.S3(s3_creds);
  return new Promise((resolve, reject) => {
    // Add a file to a Space
    var params = {
      Key: fullname, // absolute path of the file
      Body: filecontent,
      Bucket: "docserpcloud",
      ACL: "public-read", // or private
      ContentEncoding: 'binary',
      ContentType: filetype
    };
    // console.log(params)
    s3.putObject(params, function (err, data) {
      if (err) {
        console.log(err, err.stack);
        reject(err)
      } else {
        resolve(data);
        console.log(data);
      }
    });
  })
}

var getFilefromURL = async function (imageuri) {
  // console.log(imageuri)
  return new Promise((resolve, reject) => {
    try {
      axios.get(encodeURI(imageuri), {
        responseType: "arraybuffer"
      }).then((response) => {
        resolve(response)
      })
    } catch (err) {
      reject(err)
    }
  })
}

/**
 * saveFileFromUrl gets a file from a URL and saves a copy in an s3 bucket
 * @param {*} s3_creds credentials/config for the S3 client
 * @param {*} imageuri full URL to an image
 * @param {*} fullname absolute path and filename of the file to be written on s3
 */
var saveFileFromUrl = async function (s3_creds, imageuri, fullname) {
  return new Promise((resolve, reject) => {
    getFilefromURL(imageuri).then(image => {
      // console.log(image.res)
      uploadFile(s3_creds, fullname, Buffer.from(image.data, 'utf8'), image.headers['content-type']).then(s3response => {
        resolve(s3response)
      }).catch(err => {
        reject(err)
      })
    }).catch(err => {
      reject(err)
    })
  })
}

module.exports = {
  uploadFile: uploadFile,
  getFilefromURL: getFilefromURL,
  saveFileFromUrl: saveFileFromUrl
}
async function main() {
  try {
    var s3_creds = {
      "accessKeyId": "acessid",
      "endpoint": "xxxx.digitaloceanspaces.com",
      "secretAccessKey": "Vttkia0....."
    };
    await saveFileFromUrl(s3_creds, "https://gitlab.com/qtree/erpcloud_logos/-/raw/master/pdf_logo2.png?inline=true", 'media/pddd.png');
  } catch {}
}
main();
Update s3_creds to match your credentials and run it to upload the PDF logo.
Regards,
Enrique

nodejs Lambda with S3 upload via API Gateway

I've been trying to get a simple serverless API Gateway -> Node.js Lambda -> S3 flow working, but it appears that the Lambda just uploads corrupt files.
This code downloads a file from a URL and then uploads it straight to S3.
I've tried both putObject and upload (with their different params) with no success. Looking at the file sizes, the original I download is 24 KB and the (corrupt) image downloaded from S3 is 44 KB.
I simply test the application by doing a POST to the API Gateway URL.
Any ideas?
var url =
  "https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png"

module.exports.upload = function(event, context, callback) {
  https.get(url, function(res) {
    var body = ""
    res.on("data", function(chunk) {
      // Aggregate chunks
      body += chunk
    })
    res.on("end", function() {
      console.log(body)
      // Once all chunks have been received, send to S3 - putObject only
      var params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo.png",
        Body: body
      }
      var s3Params = {
        Bucket: S3_BUCKET_NAME,
        Key: "aws-logo-upload.png",
        Body: body,
        ContentType: "image/png"
      }
      s3.upload(s3Params, function(err, data) {
        // s3.putObject(params, function(err, data) {
        if (err) {
          console.log("error")
          console.error(err, err.stack)
          callback(null, { statusCode: 404, error })
        } else {
          console.log("ok")
          console.log(data)
          let response = {
            statusCode: 200
          }
          callback(null, response)
        }
      })
    })
  })
}
The following code works for me outside of API Gateway/Lambda. It yields a PNG in S3 that's downloadable as a valid 23.7 KB image. I'd expect the equivalent to work in Lambda.
const AWS = require('aws-sdk');
const https = require('https');
const s3 = new AWS.S3();

const logourl =
  'https://upload.wikimedia.org/wikipedia/commons/thumb/1/1d/AmazonWebservices_Logo.svg/500px-AmazonWebservices_Logo.svg.png';

const getThenUpload = (url, callback) => {
  https.get(url, (res) => {
    const data = [];
    res.on('data', (chunk) => {
      data.push(chunk);
    });
    res.on('end', () => {
      const params = {
        Bucket: S3_BUCKET_NAME,
        Key: 'aws-logo-upload.png',
        Body: Buffer.concat(data),
        ContentType: 'image/png',
      };
      s3.upload(params, (err, rsp) => {
        if (err) {
          console.error(err, err.stack);
          callback(err, { statusCode: 404, err });
        } else {
          console.log(rsp);
          callback(null, { statusCode: 200 });
        }
      });
    });
  });
};

getThenUpload(logourl, (err, data) => {
  if (err) {
    console.error(`Error: ${err}`);
  } else {
    console.log(`Data: ${JSON.stringify(data)}`);
  }
});
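For completeness, a minimal sketch of how getThenUpload above might be wired into a Lambda handler of the shape used in the question (assuming the same logourl and S3_BUCKET_NAME; untested behind API Gateway):

module.exports.upload = function (event, context, callback) {
  // reuse getThenUpload from the snippet above
  getThenUpload(logourl, (err, result) => {
    if (err) {
      callback(null, { statusCode: 500, body: JSON.stringify({ error: String(err) }) });
    } else {
      callback(null, { statusCode: 200, body: JSON.stringify(result) });
    }
  });
};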

Creating a GIF from remote stream in graphicsmagick

I am currently creating a GIF from remote files in Node by downloading each image to the file system into a tmp folder.
I want to bypass saving the images to a tmp folder and keep them in memory instead. Is this possible?
As you can see, I have a download function in my AWS class which saves to a tmp folder:
download(key){
  return new Promise((resolve, reject) => {
    request.head(`${this.base_url}/${this.bucket}/${key}`, (err, res, body) => {
      request(`${this.base_url}/${this.bucket}/${key}`)
        .pipe(fs.createWriteStream(`tmp/${key}`)).on('close', resolve)
    })
  })
};
Once they have all downloaded, I have a generateGif function in my GifService class which adds each file path as a custom argument to gm, adds a delay of 50 (hundredths of a second), resizes, then outputs a buffer which I then upload to AWS S3.
import gm from 'gm';
...
constructor(){
  this.gm = gm()
}

generateGif(images, prefix){
  return new Promise((resolve, reject) => {
    // for each image we want in the array, pass it to gm
    images.forEach(image => {
      this.gm.in(`tmp/${image.Key}`)
    })
    // create the gif with a 50/100ths-of-a-second delay between frames, resized to 600x600
    this.gm
      .delay(50)
      .resize(600, 600)
      .toBuffer('gif', async (err, buffer) => {
        if (err) reject(err)
        const params = {
          ACL: 'public-read',
          Bucket: config.aws_bucket,
          ContentType: 'image/gif',
          Key: `${prefix}/${uuid()}.gif`,
          Body: buffer
        }
        try {
          // upload to S3
          const upload = await this.aws.upload(params)
          // resolve the s3 URL
          resolve(upload)
        } catch (err) {
          console.log('err', err)
          reject(err)
        }
      });
  })
}
Ideally I could pass a remote file stream, or a buffer, as the custom argument, as opposed to how I am currently passing in the tmp file path:
images.forEach(image => {
  this.gm.in(`tmp/${image.Key}`)
})
I managed to make it work using only streams, by first converting the images to MIFF and concatenating them into a single stream. Passing the resulting buffer (or stream) into gm again with a delay then does the trick.
You will need to install the concat-stream npm package for this to work.
Sorry for the mixed ES5 code.
import gm from 'gm';
var concat = require('concat-stream');
...
constructor() {
  this.gm = gm()
}

start() {
  return getYourReadAbleStreamsSomehow().then(streams => {
    return this.generateGif(streams, 50);
  }).then(gifBuffer => {
    return this.uploadToAWS(gifBuffer, prefix);
  }).catch(err => {
    console.log(err)
  })
}

async uploadToAWS(buffer, prefix) {
  const params = {
    ACL: 'public-read',
    Bucket: config.aws_bucket,
    ContentType: 'image/gif',
    Key: `${prefix}/${uuid()}.gif`,
    Body: buffer
  }
  try {
    // upload to S3
    const upload = await this.aws.upload(params)
    // return the s3 response
    return upload
  } catch (err) {
    console.log('err', err)
    throw err
  }
}

generateGif(imageStreams, delay) {
  return new Promise((resolve, reject) => {
    var write = concat(function(buffer) {
      gm(buffer)
        .delay(delay)
        .toBuffer('gif', function(err, buffer) {
          if (err)
            return reject(err);
          resolve(buffer);
        })
    })
    // convert each image to miff and concatenate the streams
    var i = 0;
    var streamHandler = function() {
      gm(imageStreams[i])
        .resize('600', '600')
        .stream('miff', function(err, stdout, stderr) {
          if (err)
            return reject(err)
          var lastOne = i === imageStreams.length - 1;
          if (!lastOne)
            stdout.once('end', streamHandler)
          stdout.pipe(write, {
            end: lastOne
          });
          i++;
        });
    }
    streamHandler();
  })
}
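getYourReadAbleStreamsSomehow is left as a placeholder above; one possible sketch of it, assuming an AWS SDK v2 s3 client and the same images array of { Key } objects from the question, so that nothing is written to the tmp folder:

const getYourReadAbleStreamsSomehow = (images) =>
  Promise.resolve(
    // one readable stream per S3 object, fetched directly from the bucket
    images.map(image =>
      s3.getObject({ Bucket: config.aws_bucket, Key: image.Key }).createReadStream()
    )
  );

Note that in the start() method above it is called without arguments, so the images array would need to be captured in scope or passed through.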
