Node.js: How to avoid sending read-only headers in AWS Lambda@Edge?

I have set up Lambda@Edge on CloudFront to resize images on the fly using width and height query parameters, so I added the code below
(I didn't write this code, and I can't remember where I got it from)
const querystring = require('querystring');
const http = require('http');
const https = require('https');
const fs = require('fs');
const child = require('child_process');

// headers that CloudFront does not allow in the HTTP response
const blacklistedHeaders = [
  /^connection$/i,
  /^content-length$/i,
  /^expect$/i,
  /^keep-alive$/i,
  /^proxy-authenticate$/i,
  /^proxy-authorization$/i,
  /^proxy-connection$/i,
  /^trailer$/i,
  /^upgrade$/i,
  /^x-accel-buffering$/i,
  /^x-accel-charset$/i,
  /^x-accel-limit-rate$/i,
  /^x-accel-redirect$/i,
  /^X-Amz-Cf-.*/i,
  /^X-Amzn-.*/i,
  /^X-Cache.*/i,
  /^X-Edge-.*/i,
  /^X-Forwarded-Proto.*/i,
  /^X-Real-IP$/i
];
exports.handler = (event, context, callback) => {
  console.log(JSON.stringify(event, null, 2));
  const request = event.Records[0].cf.request;
  const origin = request.origin.custom;
  const protocol = origin.protocol;
  const tmpPath = '/tmp/sourceImage';
  const targetPath = '/tmp/targetImage';
  const getFile = origin.protocol === 'https' ? https.get : http.get;
  const options = querystring.parse(request.querystring);
  const maxSize = 5000;
  const width = Math.min(options.width || maxSize, maxSize);
  const height = Math.min(options.height || maxSize, maxSize);

  // make sure the input values are numbers
  if (Number.isNaN(width) || Number.isNaN(height)) {
    console.log('Invalid input');
    context.succeed({
      status: '400',
      statusDescription: 'Invalid input'
    });
    return;
  }

  // download the file from the origin server
  getFile(`${origin.protocol}://${origin.domainName}${origin.path}${request.uri}`, (res) => {
    const statusCode = res.statusCode;
    console.log(res.headers);

    // grab the headers from the origin response and reformat them
    // to match the Lambda@Edge return format
    const originHeaders = Object.keys(res.headers)
      // some headers we get back from the origin
      // must be filtered out because they are blacklisted by CloudFront
      .filter((header) => blacklistedHeaders.every((blheader) => !blheader.test(header)))
      .reduce((acc, header) => {
        acc[header.toLowerCase()] = [
          {
            key: header,
            value: res.headers[header]
          }
        ];
        return acc;
      }, {});

    if (statusCode === 200) {
      const writeStream = fs.createWriteStream(tmpPath);
      res
        .on('error', (e) => {
          context.succeed({
            status: '500',
            statusDescription: 'Error downloading the image'
          });
        })
        .pipe(writeStream);

      writeStream
        .on('finish', () => {
          console.log('image downloaded');
          try {
            // invoke ImageMagick to resize the image
            const stdout = child.execSync(
              `convert ${tmpPath} -resize ${width}x${height}\\> -quality 80 ${targetPath}`
            );
          } catch (e) {
            console.log('ImageMagick error');
            console.log(e.stderr.toString());
            context.succeed({
              status: '500',
              statusDescription: 'Error resizing image'
            });
            return;
          }
          const image = fs.readFileSync(targetPath).toString('base64');
          context.succeed({
            bodyEncoding: 'base64',
            body: image,
            headers: originHeaders,
            status: '200',
            statusDescription: 'OK'
          });
        })
        .on('error', (e) => {
          console.log(e);
          context.succeed({
            status: '500',
            statusDescription: 'Error writing the image to a file'
          });
        });
    } else {
      // grab the status code from the origin response
      // and return it to the viewer
      console.log('statusCode: ', statusCode);
      context.succeed({
        status: statusCode.toString(),
        headers: originHeaders
      });
    }
  });
};
I am getting a 502 error on some images (not all of them) with this message:
The Lambda function result failed validation: The function tried to add, delete, or change a read-only header.
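The post doesn't say which header triggers this, but CloudFront documents a few headers as read-only in response events (Transfer-Encoding and Via among them), and an origin typically sends those on only some responses, e.g. chunked ones, which would match the "some images, not all" symptom. A minimal sketch, assuming that is the cause, is to extend the blacklist so those headers are filtered out too:
// hedged sketch: also drop headers CloudFront treats as read-only;
// the origin only sends these on some responses, which would explain
// why only some images trigger the 502
blacklistedHeaders.push(
  /^transfer-encoding$/i,
  /^via$/i
);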

Related

Push img resolution from loop to array

I receive an array of images and want to get the resolution (width, height) of each one and store it in the database. How can I correctly attach the resolution (the resolution constant) to the record for the respective file?
const FilesStorage = require("../models/filesStorageModel");
const sizeOf = require('image-size');
const url = require('url');
const https = require('https');

exports.uploadFiles = async (req, res) => {
  const filesArray = req.files;
  try {
    for (const file of filesArray) {
      const imgUrl = file.location;
      const options = url.parse(imgUrl);
      https.get(options, (res) => {
        const chunks = [];
        res.on('data', (chunk) => {
          chunks.push(chunk);
        }).on('end', () => {
          const buffer = Buffer.concat(chunks);
          const resolution = sizeOf(buffer);
          console.log(resolution);
        });
      });
      const uploadFiles = new FilesStorage({
        name: file.originalname,
        altTag: "alt",
        format: file.mimetype,
        filePath: file.location,
        sizeKB: (file.size / 1000),
        height: "resolution.height", // How?
        width: "resolution.width", // How?
      });
      await uploadFiles.save();
    }
    return res.status(200).json({
      success: true,
      message: 'Files successfully uploaded',
    });
  } catch (err) {
    return res.status(500).json({
      success: false,
      message: `Something went wrong`,
    });
  }
};
Here's the answer you're looking for:
Two things are going on here. First, scope: the resolution constant is declared inside the 'end' handler, so it isn't visible where you build the FilesStorage document. Second, timing: https.get is asynchronous, so even a variable declared in the outer scope would still be undefined by the time uploadFiles.save() runs.
The fix is to wrap the download in a Promise and await it inside the loop, so the resolution is known before you build the document.
Hope this helps!
const FilesStorage = require("../models/filesStorageModel");
const sizeOf = require('image-size');
const url = require('url');
const https = require('https');

exports.uploadFiles = async (req, res) => {
  const filesArray = req.files;
  try {
    for (const file of filesArray) {
      const imgUrl = file.location;
      const options = url.parse(imgUrl);
      // Wrap the download in a Promise so we can await the result;
      // `resolution` is in scope and populated before the save below
      const resolution = await new Promise((resolve, reject) => {
        https.get(options, (res) => {
          const chunks = [];
          res.on('data', (chunk) => {
            chunks.push(chunk);
          }).on('end', () => {
            const buffer = Buffer.concat(chunks);
            resolve(sizeOf(buffer));
          }).on('error', reject);
        });
      });
      console.log(resolution);
      const uploadFiles = new FilesStorage({
        name: file.originalname,
        altTag: "alt",
        format: file.mimetype,
        filePath: file.location,
        sizeKB: (file.size / 1000),
        // The awaited value is available here
        height: resolution.height,
        width: resolution.width,
      });
      await uploadFiles.save();
    }
    return res.status(200).json({
      success: true,
      message: 'Files successfully uploaded',
    });
  } catch (err) {
    return res.status(500).json({
      success: false,
      message: `Something went wrong`,
    });
  }
};
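If throughput matters, the downloads could also run in parallel rather than one at a time. A sketch, where measureResolution is a hypothetical helper extracted from the Promise wrapper above:
// hypothetical helper: the Promise wrapper above, factored out
const measureResolution = (imgUrl) => new Promise((resolve, reject) => {
  https.get(url.parse(imgUrl), (res) => {
    const chunks = [];
    res.on('data', (chunk) => chunks.push(chunk))
       .on('end', () => resolve(sizeOf(Buffer.concat(chunks))))
       .on('error', reject);
  });
});

// measure every file concurrently, preserving order
const resolutions = await Promise.all(filesArray.map((file) => measureResolution(file.location)));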

NodeJS API to upload image to S3 not returning response

I am trying to upload an image to an AWS S3 bucket using Node.js. The image is getting saved, but the API returns 404 (Not Found). Here is my controller code:
async UploadProfileImage(ctx) {
  try {
    var file = ctx.request.files.profileImage;
    if (file) {
      fs.readFile(file.path, (err, fileData) => {
        var resp = s3Utility.UploadProfileImageToS3(file.name, fileData);
        // Not reaching here, although the ETag is printed in the console.
        console.log(resp);
        ctx.status = 200;
        ctx.body = { response: 'file Uploaded!' };
      });
    }
    else {
      ctx.status = 400;
      ctx.body = { response: 'File not found!' };
    }
  } catch (error) {
    ctx.status = 500;
    ctx.body = { response: 'There was an error. Please try again later!' };
  }
}
The utility class I am using is:
const AWS = require('aws-sdk');
const crypto = require("crypto");
var fs = require('fs');
const mime = require('mime-types');

export class S3Utility {
  constructor() { }

  async UploadProfileImageToS3(fileName, data) {
    let randomId = crypto.randomBytes(16).toString("hex");
    AWS.config.update({ region: "Region", accessKeyId: "KeyID", secretAccessKey: "SecretAccessKey" });
    var s3 = new AWS.S3();
    var imageName = randomId + fileName;
    var params = {
      Bucket: "BucketName",
      Key: imageName,
      Body: data,
      ContentType: mime.lookup(fileName)
    };
    return new Promise((resolve, reject) => {
      s3.putObject(params, function (err, data) {
        if (err) {
          console.log('Error: ', err);
          reject(new Error(err.message));
        } else {
          console.log(data);
          resolve({
            response: data,
            uploadedFileName: imageName
          });
        }
      });
    });
  }
}

const s3Utility: S3Utility = new S3Utility();
export default s3Utility;
The code uploads the file to S3, but it does not return a proper response. When I test this endpoint in Postman, I get a "Not Found" message, yet I can see the ETag being logged in the console. I don't know what is going wrong here; I am sure it has something to do with the promise. Can someone please point out or fix the mistake?
Edit:
Using the promise-based fs.readFile does the trick. With the callback version, the handler returned before the callback ran, so Koa had already ended the request with its default 404 by the time ctx.status and ctx.body were set. Awaiting keeps the handler alive until the response is ready:
const fs = require('fs').promises;
const fileData = await fs.readFile(file.path, "binary");
var resp = await s3Utility.UploadProfileImageToS3(file.name, fileData);
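For context, the corrected controller would look roughly like this (a sketch; fs here is require('fs').promises as in the edit above):
async UploadProfileImage(ctx) {
  try {
    const file = ctx.request.files.profileImage;
    if (!file) {
      ctx.status = 400;
      ctx.body = { response: 'File not found!' };
      return;
    }
    // Await both steps so the handler does not resolve (and Koa does not
    // send its default 404) before the upload finishes.
    const fileData = await fs.readFile(file.path, "binary");
    const resp = await s3Utility.UploadProfileImageToS3(file.name, fileData);
    console.log(resp);
    ctx.status = 200;
    ctx.body = { response: 'file Uploaded!' };
  } catch (error) {
    ctx.status = 500;
    ctx.body = { response: 'There was an error. Please try again later!' };
  }
}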

node.js cannot parse json - reason: Unexpected token in JSON at position 0

I've been trying for several days, but I can't parse the JSON returned by this URL: http://sdmx.istat.it/SDMXWS/rest/dataflow/IT1//?detail=full&references=none&format=jsonstructure
I can write the response to a file, and if I upload that file to one of the many online tools it tells me it is valid JSON. Testing the URL directly in the same online tools also gives me valid JSON.
This is my code. What am I doing wrong?
var express = require('express');
var router = express.Router();
const fetch = require('node-fetch');
const JSONstat = require("jsonstat-toolkit");
const JSONstatUtils = require("jsonstat-suite");
const fs = require('fs');

router.get('/', async (req, res) => {
  var ws_entry_point = 'http://sdmx.istat.it/SDMXWS/rest';
  var resource = 'dataflow';
  var agencyID = 'IT1';
  var detail = 'full';
  var references = 'none';
  var format = 'jsonstructure';
  var queryUrl = ws_entry_point + '/' + resource + '/' + agencyID + '//?detail=' + detail + '&references=' + references + '&format=' + format;
  // http://sdmx.istat.it/SDMXWS/rest/dataflow/IT1//?detail=full&references=none&format=jsonstructure
  console.log(queryUrl);

  fetch(queryUrl, {
    method: 'GET',
    headers: {
      'Content-Type': 'application/json, charset=UTF-8'
    }
  }).then(checkResponseStatus)
    .then(async response => {
      try {
        const data = await response.json();
        console.log('response data?', data);
      } catch (error) {
        console.log('Error happened here!');
        console.error(error);
      }
    }).catch(err => console.log(err));

  // fs.writeFile('dataflow.json', res.data, function (err) {
  //   if (err) return console.log(err);
  //
  //   console.log('write response.text > dataflow.json');
  //   fs.readFile('dataflow.json', 'utf8', function (err, dataF) {
  //     if (err) throw err; // we'll not consider error handling for now
  //     console.log('read < dataflow.json');
  //     // var obj = JSON.parse(dataF);
  //     console.log(dataF);
  //   });
  // });
});

function checkResponseStatus(res) {
  if (res.ok) {
    return res;
  } else {
    throw new Error(`The HTTP status of the response: ${res.status} (${res.statusText})`);
  }
}
Any help is appreciated. Regards,
Maurizio
The server does not respond with the requested application/json content type; instead it sends a response with content type application/vnd.sdmx.structure+json, and the body starts with whitespace-like characters (possibly including a byte-order mark) that JSON.parse rejects, which is exactly what produces "Unexpected token in JSON at position 0".
You can fix this by reading the response with .text() and parsing it manually after trimming (String.prototype.trim also strips a leading BOM):
const rawData = await response.text();
const data = JSON.parse(rawData.trim());
console.log('response data?', data)
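If you want to confirm what the server actually sends, you can log the response header before parsing (headers.get is part of the node-fetch Response API):
// log the content type the server really returned
console.log('content-type:', response.headers.get('content-type'));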

Get progress of firebase admin file upload

I'm trying to get the progress of a one-minute video uploading to a Firebase storage bucket using the Admin SDK. I've seen a lot about using firebase.storage().ref.child..., but I can't do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');

  const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
  let mediaFileName;
  let mediaToBeUploaded = {};

  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
      console.log(mimetype);
      return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed' });
    }
    // my.image.png
    const imageExtension = filename.split('.')[filename.split('.').length - 1];
    // 43523451452345231234.png
    mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), mediaFileName);
    mediaToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
    file.on('limit', function () {
      fs.unlink(filepath, function () {
        return res.json({ 'Error': 'Max file size is 200 Mb, file size too large' });
      });
    });
  });

  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(mediaToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: mediaToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const mediaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
        return res.json({ mediaUrl: mediaUrl });
      })
      .catch((err) => {
        console.error(err);
        return res.json({ 'Error': 'Error uploading media' });
      });
  });

  req.pipe(busboy);
}
This works okay right now, but the problem is that the user can't see how far along their one- or two-minute video upload is. Currently there's just an activity indicator, and the user sits there waiting without any feedback. I'm using React Native on the frontend, if that helps. Would appreciate any help!
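For what it's worth, the Admin SDK doesn't expose an upload-progress event, but you can approximate server-side progress by streaming the file yourself and counting the bytes piped so far. A sketch that would replace the bucket().upload(...) call inside the busboy 'finish' handler (note this only tells the server; pushing the number to the React Native client would need extra plumbing such as a socket, which is why the answer below moves the upload to the client):
// hedged sketch: approximate progress by counting bytes read from disk
const { size } = fs.statSync(mediaToBeUploaded.filepath);
let sent = 0;
fs.createReadStream(mediaToBeUploaded.filepath)
  .on('data', (chunk) => {
    sent += chunk.length;
    console.log(`upload ~${Math.round((sent / size) * 100)}%`);
  })
  .pipe(
    admin.storage().bucket().file(mediaFileName).createWriteStream({
      metadata: { contentType: mediaToBeUploaded.mimetype }
    })
  )
  .on('finish', () => console.log('upload complete'));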
I was able to implement this much more easily on the client side, and it works perfectly for both image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the regular Firebase client SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`, snapshot => {
  this.uploadProgress = (snapshot.bytesTransferred / snapshot.totalBytes);
}, error => {
  this.error = error.message;
  this.submitting = false;
  this.uploadingMedia = false;
  return;
},
async () => {
  storageRef.snapshot.ref.getDownloadURL().then(async (url) => {
    imageUrl = [];
    videoUrl = [url];
    this.uploadingMedia = false;
    this.submitPost(imageUrl, videoUrl);
  });
});
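The same pattern can be factored into a reusable promise-based helper that reports progress through a callback: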
export const uploadFile = (
  folderPath,
  fileName,
  file,
  generateDownloadURL = true,
  updateInformationUploadProgress
) => {
  return new Promise((resolve, reject) => {
    try {
      const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
      const uploadTask = storageRef.put(file)
      uploadTask.on(
        'state_changed',
        snapshot => {
          if (updateInformationUploadProgress) {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100
            updateInformationUploadProgress({
              name: fileName,
              progress: progress,
            })
          }
        },
        error => {
          console.log('upload error: ', error)
          reject(error)
        },
        () => {
          if (generateDownloadURL) {
            uploadTask.snapshot.ref
              .getDownloadURL()
              .then(url => {
                resolve(url)
              })
              .catch(error => {
                console.log('url error: ', error.message)
                reject(error)
              })
          } else {
            resolve(uploadTask.snapshot.metadata.fullPath)
          }
        }
      )
    } catch (error) {
      reject(error)
    }
  })
}
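Usage might look like this, inside an async function (the folder name here is a placeholder):
// report progress to the UI as the upload proceeds
const url = await uploadFile('videos', mediaFileName, blob, true, ({ name, progress }) => {
  console.log(`${name}: ${progress.toFixed(0)}%`);
});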

How to send selected files to a node server using react native fs?

I am using https://github.com/itinance/react-native-fs to upload files from a React Native client, but nothing is arriving at my Node.js server. By the way, I used react-native-document-picker (https://github.com/Elyx0/react-native-document-picker) to select the files from the Android file system. Here is my client app's code:
async uploadToNode() {
  let testUrl = this.state.multipleFile[0].uri; // content://com.android.providers.media.documents/document/image%3A7380
  const split = testUrl.split('/');
  const name = split.pop();
  const setFileName = "Img";
  const inbox = split.pop();
  const realPath = `${RNFS.TemporaryDirectoryPath}${inbox}/${name}`;
  const uploadUrl = "http://localhost:8082/uploadToIpfs";

  var uploadBegin = (response) => {
    const jobId = response.jobId;
    console.log('UPLOAD HAS BEGUN! JobId: ' + jobId);
  };

  var uploadProgress = (response) => {
    const percentage = Math.floor((response.totalBytesSent / response.totalBytesExpectedToSend) * 100);
    console.log('UPLOAD IS ' + percentage + '% DONE!');
  };

  RNFS.uploadFiles({
    toUrl: uploadUrl,
    files: [{
      name: setFileName,
      filename: name,
      filepath: realPath,
    }],
    method: 'POST',
    headers: {
      'Accept': 'application/json',
    },
    begin: uploadBegin,
    beginCallback: uploadBegin, // Don't ask me, only way I made it work as of 1.5.1
    progressCallback: uploadProgress,
    progress: uploadProgress
  })
    .then((response) => {
      console.log(response, "<<< Response");
      if (response.statusCode == 200) { // You might not be getting a statusCode at all. Check
        console.log('FILES UPLOADED!');
      } else {
        console.log('SERVER ERROR');
      }
    })
    .catch((err) => {
      if (err.description) {
        switch (err.description) {
          case "cancelled":
            console.log("Upload cancelled");
            break;
          case "empty":
            console.log("Empty file");
            break;
          default:
            // Unknown
        }
      } else {
        // Weird
      }
      console.log(err);
    });
}
I'm not sure the Node.js code is correct for receiving the files from the client app. Here is my server code:
app.post('/uploadToIpfs', (req, res) => {
  // network.changeCarOwner(req.body.key, req.body.newOwner)
  //   .then((response) => {
  //     res.send(response);
  //   });

  // var fileName = "Img";
  // if (req.name == fileName) {
  //   console.log(req.filename);
  //   res.send("Passed")
  // } else {
  //   res.send("failed")
  // }

  console.log(req.files[0].filename);
  res.send("Passed")
});

app.listen(process.env.PORT || 8082);
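No answer was recorded for this one, but one thing stands out: req.files is not populated by Express on its own; a multipart parser has to run first. A minimal sketch, assuming Express with the multer middleware (the field name must match the name sent by RNFS.uploadFiles):
// hedged sketch: let multer parse the multipart body so req.files exists
const multer = require('multer');
const upload = multer({ dest: 'uploads/' });

// 'Img' matches the `name` field in the RNFS.uploadFiles call above
app.post('/uploadToIpfs', upload.array('Img'), (req, res) => {
  console.log(req.files[0].originalname);
  res.send("Passed");
});
Also note that http://localhost:8082 on an Android device or emulator points at the device itself, not your development machine; the emulator reaches the host at http://10.0.2.2.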
