Push img resolution from loop to array - node.js

I receive an array of images and want to get each image's resolution (width, height) and store it properly in the db. How can I correctly get the value of const resolution into the record for the respective file?
const FilesStorage = require("../models/filesStorageModel");
const sizeOf = require('image-size');
const url = require('url')
const https = require('https')

exports.uploadFiles = async (req, res) => {
  const filesArray = req.files
  try {
    for (const file of filesArray) {
      const imgUrl = file.location
      const options = url.parse(imgUrl)
      https.get(options, (res) => {
        const chunks = []
        res.on('data', (chunk) => {
          chunks.push(chunk)
        }).on('end', () => {
          const buffer = Buffer.concat(chunks)
          const resolution = sizeOf(buffer)
          console.log(resolution)
        })
      })
      const uploadFiles = new FilesStorage({
        name: file.originalname,
        altTag: "alt",
        format: file.mimetype,
        filePath: file.location,
        sizeKB: (file.size / 1000),
        height: "resolution.height", // How?
        width: "resolution.width", // How?
      })
      await uploadFiles.save()
    }
    return res.status(200).json({
      success: true,
      message: 'Files successfully uploaded',
    })
  } catch (err) {
    return res.status(500).json({
      success: false,
      message: 'Something went wrong',
    })
  }
}

Here's the answer you're looking for:
First, we need to consider timing, not just variable scope. The callback you pass to https.get runs asynchronously: it only fires once the response arrives, which is after the code below it has already built and saved the document. So even if you hoist a resolution variable above the callback, it will still be undefined when new FilesStorage(...) runs.
The fix is to wrap the download in a Promise that resolves with the image's dimensions, and await that Promise inside the for loop before building the document. The value stays file-specific, and it is guaranteed to be available when you insert it into your database call.
Hope this helps!
const FilesStorage = require("../models/filesStorageModel");
const sizeOf = require('image-size');
const https = require('https')

// Download the image and resolve with its dimensions once the
// response has been fully received.
const getResolution = (imgUrl) => new Promise((resolve, reject) => {
  https.get(imgUrl, (res) => {
    const chunks = []
    res.on('data', (chunk) => chunks.push(chunk))
      .on('end', () => resolve(sizeOf(Buffer.concat(chunks))))
      .on('error', reject)
  }).on('error', reject)
})

exports.uploadFiles = async (req, res) => {
  const filesArray = req.files
  try {
    for (const file of filesArray) {
      // Wait for the download to finish so the dimensions are
      // available before the document is built.
      const resolution = await getResolution(file.location)
      const uploadFiles = new FilesStorage({
        name: file.originalname,
        altTag: "alt",
        format: file.mimetype,
        filePath: file.location,
        sizeKB: (file.size / 1000),
        // Now a real value, not a placeholder string
        height: resolution.height,
        width: resolution.width,
      })
      await uploadFiles.save()
    }
    return res.status(200).json({
      success: true,
      message: 'Files successfully uploaded',
    })
  } catch (err) {
    return res.status(500).json({
      success: false,
      message: 'Something went wrong',
    })
  }
}
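A side note, not part of the original answer: because each iteration awaits the download and the save, files are processed strictly one after another. If the insertion order of the records doesn't matter, here is a sketch of a parallel variant, reusing getResolution and FilesStorage from above:
// Process all files concurrently; Promise.all rejects if any file
// fails, which the surrounding try/catch already handles.
await Promise.all(filesArray.map(async (file) => {
  const resolution = await getResolution(file.location)
  return new FilesStorage({
    name: file.originalname,
    altTag: "alt",
    format: file.mimetype,
    filePath: file.location,
    sizeKB: (file.size / 1000),
    height: resolution.height,
    width: resolution.width,
  }).save()
}))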

Related

Kuzzle / Minio example usage

Do the Kuzzle or Minio development teams have a working example of using the Kuzzle S3 plugin with Minio? I have the following, but my file isn't being uploaded, and the pre-signed URL refers to https://your-s3-bucket.s3.eu-west-3.amazonaws.com/
const fs = require("fs");
const fsPromises = require('fs').promises;
// Create a JS File object instance from a local path using Node.js
const fileObject = require("get-file-object-from-local-path");
// Promise based HTTP client for the browser and node.js
const axios = require('axios');
// Loads the Kuzzle SDK modules
const {
  Kuzzle,
  WebSocket
} = require('kuzzle-sdk');

var start = new Date();

const webSocketOptionsObject = {
  "autoReconnect": true,
  "ssl": true,
  "port": 443
};
const kuzzle = new Kuzzle(new WebSocket('myurl.com', webSocketOptionsObject));
const credentials = { username: 'xyz123', password: 'fithenmgjtkj' };
const path = __dirname + "\\" + "yellow_taxi_data.csv"; // the "\\" is for Windows paths
var fileData = {};

// check the file exists
fs.access(path, fs.F_OK, (err) => {
  if (err) {
    console.error(err)
    return
  }
  fileData = new fileObject.LocalFileData(path);
  // Adds a listener to detect connection problems
  kuzzle.on('networkError', error => {
    console.error('Network Error:', error);
  });
});

const connectToKuzzle = async () => {
  // Connects to the Kuzzle server
  await kuzzle.connect();
  return await kuzzle.auth.login('local', credentials);
  // console.log('jwt auth token: ', jwt);
}

const disConnectFromKuzzle = async () => {
  console.log('Disconnected from Kuzzle');
  kuzzle.disconnect();
  var time = new Date() - start;
  // sec = Math.floor((time/1000) % 60);
  console.log('Execution time in milliseconds: ', time);
}

const presignedURL = async () => {
  // Get a pre-signed URL
  const result = await kuzzle.query({
    controller: 's3/upload',
    action: 'getUrl',
    uploadDir: 'proxybucket', // directory name inside the bucket specified in the s3 plugin bucket name
    filename: fileData.name
  });
  console.log("result: ", result);
  return result;
}

const loadFileStream = async () => {
  console.log('getting file: ', path);
  targetFile = null;
  await fs.promises.readFile(path)
    .then(function (result) {
      console.log("file loaded------", result.length);
      targetFile = result;
    })
    .catch(function (error) {
      console.log(error);
      return;
    });
  return targetFile;
}

const kuzzleValidate = async (kuzzleResource) => {
  // console.log("kuzzleResource: ", kuzzleResource.result.fileKey);
  // Validate and persist a previously uploaded file.
  // https://docs.kuzzle.io/official-plugins/s3/2/controllers/upload/validate/
  const Presult = await kuzzle.query({
    // Kuzzle API params
    "controller": "s3/upload",
    "action": "validate",
    // File key in S3 bucket
    "fileKey": kuzzleResource.result.fileKey
  });
  console.log('validate: ', Presult.result.fileUrl);
}

const uploadFile = async (fileBuffer, kuzzleResource, jwt) => {
  // options at https://github.com/axios/axios
  const axiosOptions = {
    headers: {
      'Content-Type': fileData.type
    },
    maxBodyLength: 200000000 // 200,000,000 bytes = 200 MB
  };
  // PUT the fileBuffer to the Kuzzle S3 endpoint
  // https://github.com/axios/axios
  axios.defaults.headers.common['Authorization'] = jwt;
  const response = await axios.put(kuzzleResource.result.uploadUrl, fileBuffer, axiosOptions)
    .then((response) => {
      console.log('file uploaded......');
    })
    .catch(function (error) {
      console.log("File upload error: ", error);
      return;
    });
  return "Upload successful";
}

if (fileData) {
  connectToKuzzle().then((jwt) => {
    console.log(jwt);
    // upload(jwt);
    presignedURL().then((kuzzleResource) => {
      loadFileStream().then((fileBuffer) => {
        uploadFile(fileBuffer, kuzzleResource, jwt).then((doneMessage) => {
          console.log("doneMessage: ", doneMessage);
        }).then(() => {
          kuzzleValidate(kuzzleResource).then(() => {
            disConnectFromKuzzle();
          });
        });
      });
    });
  });
}
I'm looking to upload to a Minio bucket and obtain a pre-signed URL so I can store it in a document later.
You can change the endpoint configuration to point to a different S3-compatible endpoint, which can be a Minio one.
This can be changed under the plugins.s3.endpoint key. You should also force path-style access, which Minio requires; that the pre-signed URL points at the default virtual-hosted amazonaws.com address suggests the client is still using the AWS defaults.
Example:
app.config.set('plugins.s3.endpoint', 'https://minio.local');
app.config.set('plugins.s3.s3ClientOptions.s3ForcePathStyle', true);
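For reference, a sketch of the equivalent file-based configuration, assuming a standard Kuzzle setup where settings live in .kuzzlerc (the key names mirror the calls above):
{
  "plugins": {
    "s3": {
      "endpoint": "https://minio.local",
      "s3ClientOptions": {
        "s3ForcePathStyle": true
      }
    }
  }
}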

Get progress of firebase admin file upload

I'm trying to get the progress of a 1-minute video uploading to Firebase bucket storage using the Admin SDK. I've seen a lot about using firebase.storage().ref.child..... but I'm unable to do that with the Admin SDK since it doesn't have the same functions. This is my file upload:
exports.uploadMedia = (req, res) => {
  const BusBoy = require('busboy');
  const path = require('path');
  const os = require('os');
  const fs = require('fs');
  const busboy = new BusBoy({ headers: req.headers, limits: { files: 1, fileSize: 200000000 } });
  let mediaFileName;
  let mediaToBeUploaded = {};
  busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== 'image/jpeg' && mimetype !== 'image/png' && mimetype !== 'video/quicktime' && mimetype !== 'video/mp4') {
      console.log(mimetype);
      return res.status(400).json({ error: 'Wrong file type submitted, only .png, .jpeg, .mov, and .mp4 files allowed' })
    }
    // my.image.png
    const imageExtension = filename.split('.')[filename.split('.').length - 1];
    // 43523451452345231234.png
    mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
    const filepath = path.join(os.tmpdir(), mediaFileName);
    mediaToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
    file.on('limit', function () {
      fs.unlink(filepath, function () {
        return res.json({ 'Error': 'Max file size is 200 Mb, file size too large' });
      });
    });
  });
  busboy.on('finish', () => {
    admin
      .storage()
      .bucket()
      .upload(mediaToBeUploaded.filepath, {
        resumable: false,
        metadata: {
          metadata: {
            contentType: mediaToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const mediaUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${mediaFileName}?alt=media`;
        return res.json({ mediaUrl: mediaUrl });
      })
      .catch((err) => {
        console.error(err);
        return res.json({ 'Error': 'Error uploading media' });
      });
  });
  req.pipe(busboy);
}
This works okay right now, but the only problem is that the user can't see how far along their 1- or 2-minute video upload is. Currently it's just an activity indicator, and the user sits there waiting without any feedback. I'm using React Native on the frontend, if that helps with anything. Would appreciate any help!
I was able to implement it on the client side a lot more easily, and it works perfectly with image and video upload progress. On the backend I was using the Admin SDK, but on the frontend I was originally using the Firebase SDK.
this.uploadingMedia = true;
const imageExtension = this.mediaFile.split('.')[this.mediaFile.split('.').length - 1];
const mediaFileName = `${Math.round(Math.random() * 100000000000)}.${imageExtension}`;
const response = await fetch(this.mediaFile);
const blob = await response.blob();
const storageRef = storage.ref(`${mediaFileName}`).put(blob);
storageRef.on(`state_changed`, snapshot => {
  this.uploadProgress = (snapshot.bytesTransferred / snapshot.totalBytes);
}, error => {
  this.error = error.message;
  this.submitting = false;
  this.uploadingMedia = false;
  return;
},
async () => {
  storageRef.snapshot.ref.getDownloadURL().then(async (url) => {
    imageUrl = [];
    videoUrl = [url];
    this.uploadingMedia = false;
    this.submitPost(imageUrl, videoUrl);
  });
});
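Here is a more reusable variant of the same idea: the upload wrapped in a Promise that reports progress through a callback and resolves with the download URL (or the storage path):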
export const uploadFile = (
  folderPath,
  fileName,
  file,
  generateDownloadURL = true,
  updateInformationUploadProgress
) => {
  return new Promise((resolve, reject) => {
    try {
      const storageRef = firebaseApp.storage().ref(`${folderPath}/${fileName}`)
      const uploadTask = storageRef.put(file)
      uploadTask.on(
        'state_changed',
        snapshot => {
          if (updateInformationUploadProgress) {
            const progress =
              (snapshot.bytesTransferred / snapshot.totalBytes) * 100
            updateInformationUploadProgress({
              name: fileName,
              progress: progress,
            })
          }
        },
        error => {
          console.log('upload error: ', error)
          reject(error)
        },
        () => {
          if (generateDownloadURL) {
            uploadTask.snapshot.ref
              .getDownloadURL()
              .then(url => {
                resolve(url)
              })
              .catch(error => {
                console.log('url error: ', error.message)
                reject(error)
              })
          } else {
            resolve(uploadTask.snapshot.metadata.fullPath)
          }
        }
      )
    } catch (error) {
      reject(error)
    }
  })
}
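If you do need progress on the server side, here is a sketch, not from the original answers: the Admin SDK's upload() doesn't emit progress events, but you can stream the temp file into the bucket yourself and count the bytes as you read them. The names mediaToBeUploaded and mediaFileName are taken from the question's code; note the percentage tracks bytes handed to the SDK's stream, so it slightly leads the real network transfer.
const fs = require('fs');

const { size } = fs.statSync(mediaToBeUploaded.filepath);
let transferred = 0;
fs.createReadStream(mediaToBeUploaded.filepath)
  .on('data', (chunk) => {
    transferred += chunk.length;
    // Report this however you like, e.g. over a websocket to the client
    console.log(`upload progress: ${Math.round((transferred / size) * 100)}%`);
  })
  .pipe(
    admin.storage().bucket().file(mediaFileName).createWriteStream({
      metadata: { contentType: mediaToBeUploaded.mimetype },
    })
  )
  .on('finish', () => console.log('upload complete'));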

Upload multiple images on Firebase using NodeJS and Busboy

I created a function for uploading a single image to Firebase using NodeJS and Busboy, which returns the image URL. The only allowed image extensions are .jpg and .png. It generates a random filename and creates a filepath with the storageBucket.
However, I am struggling to refactor this function so that I can upload multiple images. I made several attempts, but no luck. It should return an array of image URLs if all the images were uploaded successfully.
Here is my function with the single image upload:
const { admin, db } = require("./admin");
const config = require("./config");

exports.uploadImage = (req, res, url, folder) => {
  const BusBoy = require("busboy");
  const path = require("path");
  const os = require("os");
  const fs = require("fs");
  const busboy = new BusBoy({ headers: req.headers });
  let imageFileName;
  let imageToBeUploaded = {};
  busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
    if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
      return res
        .status(400)
        .json({ error: "Wrong file type submitted!" });
    }
    // Getting extension of any image
    const imageExtension = filename.split(".")[
      filename.split(".").length - 1
    ];
    // Setting filename
    imageFileName = `${Math.round(
      Math.random() * 1000000000
    )}.${imageExtension}`;
    // Creating path
    const filepath = path.join(os.tmpdir(), imageFileName);
    imageToBeUploaded = { filepath, mimetype };
    file.pipe(fs.createWriteStream(filepath));
  });
  busboy.on("finish", () => {
    admin
      .storage()
      .bucket()
      .upload(imageToBeUploaded.filepath, {
        destination: `${folder}/${imageFileName}`,
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      })
      .then(() => {
        const imageUrl = `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o${folder}%2F${imageFileName}?alt=media`;
        if (url === `/users/${req.user.alias}`) {
          return db.doc(`${url}`).update({ imageUrl });
        } else {
          return res.json({ imageUrl });
        }
      })
      .then(() => {
        return res.json({
          message: "Image uploaded successfully!"
        });
      })
      .catch(err => {
        console.log(err);
        return res.status(500).json({ error: err.code });
      });
  });
  busboy.end(req.rawBody);
};
Any suggestions on how to move on?
You've got the code almost done; all you have to do is create an array of promises and wait for them all to resolve.
let imageFileName = {}
let imagesToUpload = []
let imageToAdd = {}

//This triggers for each file type that comes in the form data
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
  if (mimetype !== "image/jpeg" && mimetype !== "image/png") {
    return res
      .status(400)
      .json({ error: "Wrong file type submitted!" });
  }
  // Getting extension of any image
  const imageExtension = filename.split(".")[
    filename.split(".").length - 1
  ];
  // Setting filename
  imageFileName = `${Math.round(
    Math.random() * 1000000000
  )}.${imageExtension}`;
  // Creating path
  const filepath = path.join(os.tmpdir(), imageFileName);
  imageToAdd = {
    imageFileName
    filepath,
    mimetype };
  file.pipe(fs.createWriteStream(filepath));
  //Add the image to the array
  imagesToUpload.push(imageToAdd);
});

busboy.on("finish", () => {
  let promises = []
  let imageUrls = []
  imagesToUpload.forEach(imageToBeUploaded => {
    imageUrls.push(`https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o${folder}%2F${imageFileName}?alt=media`)
    promises.push(admin
      .storage()
      .bucket()
      .upload(imageToBeUploaded.filepath, {
        destination: `${folder}/${imageFileName}`,
        resumable: false,
        metadata: {
          metadata: {
            contentType: imageToBeUploaded.mimetype
          }
        }
      }))
  })
  try {
    await Promises.all(resolve)
    res.status(200).json({ msg: 'Successfully uploaded all images', imageUrls })
  } catch (err) { res.status(500).json(err) }
});
busboy.end(req.rawBody);
With that you should be able to upload them all; it's just a matter of putting all the promises in an array and using the Promise.all method to wait for them to resolve. I wrote it with async/await because that's how I've been doing it, but I suppose you would have no problem doing it with callbacks.
Also, the code is messy, but that's mostly because I don't know how to use this text editor; I hope you can still understand it 👀
Samuel Vera's answer is almost correct. There are some typos (a missing comma in imageToAdd, and await Promises.all(resolve) should be await Promise.all(promises) inside an async callback) and a logic error in the push to the imageUrls array: it uses the shared imageFileName, which by then holds only the last file's name, instead of each image's own name.
Here is the complete, fixed code:
const BusBoy = require('busboy');
const path = require('path');
const os = require('os');
const fs = require('fs');

let fields = {};
const busboy = new BusBoy({ headers: req.headers });
let imageFileName = {};
let imagesToUpload = [];
let imageToAdd = {};
let imageUrls = [];

busboy.on('field', (fieldname, fieldvalue) => {
  fields[fieldname] = fieldvalue;
});

busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
  if (mimetype !== 'image/jpeg' && mimetype !== 'image/png') {
    return res
      .status(400)
      .json({ error: 'Wrong file type submitted!' });
  }
  // Getting extension of any image
  const imageExtension = filename.split('.')[
    filename.split('.').length - 1
  ];
  // Setting filename
  imageFileName = `${Math.round(Math.random() * 1000000000)}.${imageExtension}`;
  // Creating path
  const filepath = path.join(os.tmpdir(), imageFileName);
  imageToAdd = {
    imageFileName,
    filepath,
    mimetype,
  };
  file.pipe(fs.createWriteStream(filepath));
  // Add the image to the array
  imagesToUpload.push(imageToAdd);
});

busboy.on('finish', async () => {
  let promises = [];
  imagesToUpload.forEach((imageToBeUploaded) => {
    imageUrls.push(
      `https://firebasestorage.googleapis.com/v0/b/${config.storageBucket}/o/${imageToBeUploaded.imageFileName}?alt=media`
    );
    promises.push(
      admin
        .storage()
        .bucket()
        .upload(imageToBeUploaded.filepath, {
          resumable: false,
          metadata: {
            metadata: {
              contentType: imageToBeUploaded.mimetype,
            },
          },
        })
    );
  });
  try {
    await Promise.all(promises);
    return res.json({
      message: `Images URL: ${imageUrls}`,
    });
  } catch (err) {
    console.log(err);
    res.status(500).json(err);
  }
});
busboy.end(req.rawBody);
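One note on the fixed code, an observation rather than part of the thread: Promise.all rejects as soon as any single upload fails, so one bad file fails the whole request. If partial success should still be reported, Promise.allSettled (Node 12.9+) is a drop-in alternative:
// Collect per-file outcomes instead of failing fast
const results = await Promise.allSettled(promises);
const failed = results.filter((r) => r.status === 'rejected');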
Anyway, thank you Samuel :)

NodeJS, How to avoid sending read-only headers in AWS Lambda#Edge?

I have set up a Lambda@Edge function on CloudFront to resize images dynamically on the fly using width and height parameters on the image URLs, hence I added the code below.
(I didn't write this code, and I can't remember where I got it from.)
const querystring = require('querystring');
const http = require('http');
const https = require('https');
const fs = require('fs');
const child = require('child_process');

// headers that cloudfront does not allow in the http response
const blacklistedHeaders = [
  /^connection$/i,
  /^content-length$/i,
  /^expect$/i,
  /^keep-alive$/i,
  /^proxy-authenticate$/i,
  /^proxy-authorization$/i,
  /^proxy-connection$/i,
  /^trailer$/i,
  /^upgrade$/i,
  /^x-accel-buffering$/i,
  /^x-accel-charset$/i,
  /^x-accel-limit-rate$/i,
  /^x-accel-redirect$/i,
  /^X-Amz-Cf-.*/i,
  /^X-Amzn-.*/i,
  /^X-Cache.*/i,
  /^X-Edge-.*/i,
  /^X-Forwarded-Proto.*/i,
  /^X-Real-IP$/i
];

exports.handler = (event, context, callback) => {
  console.log(JSON.stringify(event, null, 2));
  const request = event.Records[0].cf.request;
  const origin = request.origin.custom;
  const protocol = origin.protocol;
  const tmpPath = '/tmp/sourceImage';
  const targetPath = '/tmp/targetImage';
  const getFile = origin.protocol === 'https' ?
    https.get :
    http.get;
  const options = querystring.parse(request.querystring);
  const maxSize = 5000;
  const width = Math.min(options.width || maxSize, maxSize);
  const height = Math.min(options.height || maxSize, maxSize);
  // make sure input values are numbers
  if (Number.isNaN(width) || Number.isNaN(height)) {
    console.log('Invalid input');
    context.succeed({
      status: '400',
      statusDescription: 'Invalid input'
    });
    return;
  }
  // download the file from the origin server
  getFile(`${origin.protocol}://${origin.domainName}${origin.path}${request.uri}`, (res) => {
    const statusCode = res.statusCode;
    console.log(res.headers);
    // grab headers from the origin request and reformat them
    // to match the lambda#edge return format
    const originHeaders = Object.keys(res.headers)
      // some headers we get back from the origin
      // must be filtered out because they are blacklisted by cloudfront
      .filter((header) => blacklistedHeaders.every((blheader) => !blheader.test(header)))
      .reduce((acc, header) => {
        acc[header.toLowerCase()] = [
          {
            key: header,
            value: res.headers[header]
          }
        ];
        return acc;
      }, {})
    if (statusCode === 200) {
      const writeStream = fs.createWriteStream(tmpPath);
      res
        .on('error', (e) => {
          context.succeed({
            status: '500',
            statusDescription: 'Error downloading the image'
          });
        })
        .pipe(writeStream)
      writeStream
        .on('finish', () => {
          console.log('image downloaded');
          try {
            // invoke ImageMagick to resize the image
            const stdout = child.execSync(
              `convert ${tmpPath} -resize ${width}x${height}\\> -quality 80 ${targetPath}`
            );
          } catch (e) {
            console.log('ImageMagick error');
            console.log(e.stderr.toString());
            context.succeed({
              status: '500',
              statusDescription: 'Error resizing image'
            });
            return;
          }
          const image = fs.readFileSync(targetPath).toString('base64');
          context.succeed({
            bodyEncoding: 'base64',
            body: image,
            headers: originHeaders,
            status: '200',
            statusDescription: 'OK'
          });
        })
        .on('error', (e) => {
          console.log(e);
          context.succeed({
            status: '500',
            statusDescription: 'Error writing the image to a file'
          });
        })
    } else {
      // grab the status code from the origin request
      // and return it to the viewer
      console.log('statusCode: ', statusCode);
      context.succeed({
        status: statusCode.toString(),
        headers: originHeaders
      });
    }
  })
};
I am getting a 502 error on some images, but not all of them:
The Lambda function result failed validation: The function tried to add, delete, or change a read-only header.
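A likely cause, offered here as an assumption rather than a confirmed fix: besides the blacklisted headers, CloudFront treats some headers, such as Transfer-Encoding and Via, as read-only in Lambda@Edge responses, and the filter list above doesn't cover them. Only some origin responses carry those headers (chunked responses, for example), which would explain why only some images return a 502. A hedged sketch of the extension:
// Assumption, not from the thread: also filter the read-only headers
// CloudFront forbids Lambda@Edge from adding or changing.
const blacklistedHeaders = [
  // ...all the patterns listed above, plus:
  /^transfer-encoding$/i,
  /^via$/i,
];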

May Promise.all().finally() return unresolved data?

I found quite unusual behavior of Promise.all().finally(): it looks like it returns the data before map() has been applied to it.
1. Data is received from the database.
2. A call is made to the Google Maps API inside map(), applied to the data retrieved from the database, and a "Distance" property holding the result of the Google API call is added to each object.
3. Data is returned in Promise.all(), but it is received without the new property.
I can't see how this is even possible.
public static get = async (req: Request, res: Response) => {
  const latitude = req.query.lat;
  const longitude = req.query.long;
  const pool = await new sql.ConnectionPool(CommonConstants.connectionString).connect();
  const request = pool.request();
  const result = await request.execute('SuppliersSP');
  sql.close();
  const rows = result.recordset.map(async (supplier) => {
    const data = { origin: [latitude, longitude], destination: [supplier.Latitude, supplier.Longitude] };
    const distance = await GetDistance(data) || 0;
    Object.defineProperty(supplier, 'Distance', {
      enumerable: true,
      configurable: true,
      writable: true,
      value: distance
    });
    return supplier;
  })
  Promise.all(rows).finally(() => {
    res.setHeader('Access-Control-Allow-Origin', '*');
    res.status(200).json(rows);
  });
}
Here is the GetDistance function:
import { Constants } from "./constants";
const https = require('https');

export function GetDistance(coords) {
  const { origin, destination } = coords;
  return new Promise((resolve, reject) => {
    // Note: keep the URL on a single line; splitting a template literal
    // across lines would embed newlines and spaces in the query string.
    https.get(
      `${Constants.GoogleMapsUrl}?origins=${origin[0]},${origin[1]}&destinations=${destination[0]},${destination[1]}&key=${Constants.GoogleMapsApiKey}`,
      (resp) => {
        let data = '';
        resp.on('data', (chunk) => {
          data += chunk;
        });
        resp.on('end', () => {
          const distance = JSON.parse(data);
          resolve(distance.rows[0].elements[0].distance.value);
        });
      }
    ).on("error", (err) => {
      reject("Error: " + err.message);
    });
  });
}
It was solved by changing .finally() to .then(). The .finally() callback receives no arguments, and the code was responding with rows, which is an array of Promise objects rather than the supplier records they resolve to, so the serialized response never contained the new Distance property. A .then() callback receives the resolved array, which is what should be sent.
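For clarity, a minimal sketch of the working ending (same handler as above, with only the response changed):
// rows is an array of promises; await them and respond with the
// resolved suppliers rather than the pending promises themselves.
Promise.all(rows).then((suppliers) => {
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.status(200).json(suppliers);
});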
