Using Node.js promises to wait until a conversion process has completed

I've been trying for a week, without success, to run a PSD conversion through this Node.js module: https://www.npmjs.com/package/psd, and to make a confirmation message appear after all the images have been converted.
I don't know whether the problem is in my code or in my use of promises; I've reworked every aspect of this code about 50 times.
In the psdconverter.js file:
//! PSD COMPONENT MODULES
var PSD = require('psd');
//! file and extension MODULES
const fs = require('fs');
var path = require('path');

const PSDconverter = async (filename) => {
    return new Promise((resolve, reject) => {
        PSD.open('./img/' + filename).then(function (psd) {
            let newfilename = filename.replace(/.psd/i, ""); //REPLACE CASE INSENSITIVE
            psd.image.saveAsPng('./img/' + newfilename + '.png');
            return newfilename;
        }).then(function (res) {
            console.log('PSD Conversion Finished!' + res);
            resolve(res);
        }).catch(function (err) {
            console.log(err);
        });
    })
}
const EnumAndConvert = async () => {
    return new Promise((resolve, reject) => {
        //! READ DIR IMAGE AND CONVERSION PART
        fs.readdir('./img/', (err, files) => {
            if (err)
                console.log(err + ' conversion error: the img folder was not found!');
            else {
                for (let filename of files) {
                    var ext = path.extname('./img/' + filename);
                    if (ext === '.PSD' || ext === '.psd')
                        await PSDconverter(filename);
                }
            }
        })
        resolve("Everything is converted successfully");
    })
}
exports.PSDconverter = PSDconverter;
exports.EnumAndConvert = EnumAndConvert;
In the index.js file:
function PSDconverter() {
    //! PSD converter
    let EnumPSDAndConvert = require('./psdconverter.js');
    EnumPSDAndConvert.EnumAndConvert().then((res) => { // continue once the resolved Promise comes back
        console.log(res + "ciao");
    })
}
ERROR RESULT:
await PSDconverter(filename);
^^^^^
SyntaxError: await is only valid in async function
And I want that resolved message to be logged last, only after all the conversions have finished. (The await sits inside the plain fs.readdir callback, which is not an async function, hence the SyntaxError.)
Thank you for every help!

OK, the solution is:
index.js
function PSDconverter() {
    //! PSD converter
    let EnumPSDAndConvert = require('./psdconverter.js');
    EnumPSDAndConvert.EnumAndConvert().then(() => {
        console.log("Conversion Completed");
    })
}
psdconverter.js
//! PSD COMPONENT MODULES
var PSD = require('psd');
//! file and extension MODULES
const fs = require('fs');
var path = require('path');
const PSDconverter = (filename) => { // without async
    return PSD.open('./img/' + filename).then(function (psd) {
        let newfilename = filename.replace(/\.psd/i, ""); // REPLACE CASE INSENSITIVE (note the escaped dot)
        // saveAsPng returns a promise; return it so the chain waits for the write
        return psd.image.saveAsPng('./img/' + newfilename + '.png').then(() => newfilename);
    }).then(function (res) {
        console.log('PSD Conversion Finished! ' + res);
    }).catch(function (err) {
        console.log(err);
    });
}
function readImgDir() {
    return new Promise((resolve, reject) => {
        fs.readdir('./img/', (err, files) => {
            if (err)
                reject(err + ' conversion error: the img folder was not found!'); // reject so the caller does not hang
            else {
                resolve(files);
            }
        })
    })
}
const EnumAndConvert = async () => {
    var files = await readImgDir(); //! READ DIR IMAGE AND CONVERSION PART
    for (let filename of files) {
        var ext = path.extname('./img/' + filename);
        if (ext === '.PSD' || ext === '.psd')
            await PSDconverter(filename);
    }
}
exports.PSDconverter = PSDconverter;
exports.EnumAndConvert = EnumAndConvert;
If there are any suggestions on how to improve the code, I would be curious to hear them.
Thanks again!
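One possible tightening, for anyone curious: with fs.promises (Node 10+) the manual Promise wrappers disappear entirely. A sketch (untested; the psd package's docs indicate saveAsPng returns a promise):
// psdconverter.js, leaner sketch (assumes Node >= 10 for fs.promises)
const PSD = require('psd');
const fs = require('fs').promises;
const path = require('path');

const PSDconverter = async (filename) => {
    const psd = await PSD.open('./img/' + filename);
    const newfilename = filename.replace(/\.psd$/i, ''); // strip the extension, case-insensitively
    await psd.image.saveAsPng('./img/' + newfilename + '.png'); // wait for the PNG to be written
    console.log('PSD Conversion Finished! ' + newfilename);
    return newfilename;
};

const EnumAndConvert = async () => {
    const files = await fs.readdir('./img/');
    const psdFiles = files.filter((f) => path.extname(f).toLowerCase() === '.psd');
    // Sequential keeps memory usage low; swap in Promise.all for parallel conversion.
    for (const filename of psdFiles) {
        await PSDconverter(filename);
    }
};

exports.PSDconverter = PSDconverter;
exports.EnumAndConvert = EnumAndConvert;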

Related

Resizing Images already on Google Storage locally through SHARPJS, and keeping/updating the downloadUrl

There were similar questions/answers, but none recent and none with these exact requirements.
I have many pictures for a dating app on Firebase Storage, uploaded by the users, with a downloadUrl saved in Firestore. I just noticed they are saved as very big pictures, which slows down loading for the users. Result: I need to resize and reformat to JPEG all the pictures on Firebase Storage.
My research and trials over the past 2 months brought me to the following conclusions:
It's not possible through Cloud Functions, as the 9-minute quota is too short to do the whole resizing.
Sharp is the best library for this, but it's better to run it locally.
I can use gsutil, as in this Question Here, to download all the pictures keeping their paths, resize them, and upload them back later.
I was blocked at finding out how I can resize/reformat with Sharp and, given that the name will be different and the metadata probably stripped, how I can upload the images back and at the same time get a new downloadUrl, so that I can in turn write it to the users collection in Firestore.
MY POTENTIAL SOLUTION (STEP 4):
I'm not sure it will work, but I'd have a function listening for changed (finalized) objects and getting info from each image to write back to Firestore, using a self-made downloadUrl.
MY NEW QUESTION: Is this going to work? I'm afraid of breaking the pictures of all my users...
For your better understanding, here is my process so far:
1. Download Images
gsutil cp -r gs://my-bucket/data [path where you want to download]
2. Script (TypeScript) to resize/reformat them.
import * as fs from "fs";
import sharp from "sharp";
import * as path from "path";

const walk = (dir: string, done) => {
  let results = [];
  fs.readdir(dir, (err, list) => {
    if (err) return done(err);
    let i = 0;
    (function next() {
      let file = list[i++];
      if (!file) return done(null, results);
      file = path.resolve(dir, file);
      fs.stat(file, (err, stat) => {
        if (stat && stat.isDirectory()) {
          walk(file, (err, res) => {
            results = results.concat(res);
            next();
          });
        } else {
          results.push(file);
          next();
        }
      });
    })();
  });
};
const reformatImage = async (filesPaths: string[]) => {
  let newFilesPaths: string[] = [];
  await Promise.all(
    filesPaths.map(async (filePath) => {
      let newFileName = changeExtensionName(filePath);
      let newFilePath = path.join(path.dirname(filePath), newFileName); // fixed: was the undefined NewFileName
      if (filePath === newFilePath) {
        newFileName = "rszd-" + newFileName;
        newFilePath = path.join(path.dirname(filePath), newFileName);
      }
      newFilesPaths.push(newFilePath);
      try {
        await sharp(filePath)
          .withMetadata()
          .resize(600, 800, {
            fit: sharp.fit.inside,
          })
          .toFormat("jpeg")
          .jpeg({
            mozjpeg: true,
            force: true,
          })
          .toFile(newFilePath)
          .then(async (info) => {
            console.log("converted file...", info);
          })
          .catch((error) => {
            console.log("sharp error: ", error);
          });
      } catch (error) {
        console.error("error converting...", error);
      }
    })
  );
  console.log("THIS IS THE RESIZED IMAGES");
  console.log(newFilesPaths);
};
const changeExtensionName = (filePath: string) => {
  const ext = path.extname(filePath || "");
  const virginName = path.basename(filePath, ext);
  const newName = virginName + ".jpg";
  return newName;
};

walk("./xxxxxx.appspot.com", (err, results) => {
  if (err) throw err;
  console.log("THIS IS THE DOWNLOADED IMAGES");
  console.log(results);
  reformatImage(results);
});
3. Re-upload the files
gsutil cp -r [path your images] gs://my-bucket/data
4. Listen for file updates through a Firebase Function, and save the new downloadUrl
export const onOldImageResizedUpdateDowloadUrl = functions.storage
  .object()
  .onFinalize(async (object: any) => {
    if (object) {
      functions.logger.log('OBJECT: ', object);
      const fileBucket = object.bucket;
      const filePath: string = object.name;
      const userId = path.basename(path.dirname(filePath));
      const fileName = path.basename(filePath);
      const isResized = fileName.startsWith('rszd-');
      if (!isResized) { return; }
      const token = object.metadata.firebaseStorageDownloadTokens;
      const downloadUrl = createDownloadUrl(
        fileBucket,
        token,
        userId,
        fileName
      );
      const pictureId = 'picture' + fileName.charAt(5); // pictures are named e.g. "rszd-" + 1.jpeg
      // await the write so the function does not terminate before it completes
      await admin
        .firestore()
        .collection('users')
        .doc(userId)
        .update({ [pictureId]: downloadUrl });
    }
  });
function createDownloadUrl(
  bucketPath: string,
  downloadToken: string,
  uid: string,
  fileName: string) {
  return `https://firebasestorage.googleapis.com/v0/b/${bucketPath}/o/pictures-profil%2F${uid}%2F${fileName}?alt=media&token=${downloadToken}`;
}

readFile synchronously nodejs

I am new to Node.js and have just started learning. I need to read 5 JSON files and place them in an array. I have created 2 functions: readDirectory and processFile.
let transactionArray = [];

router.get('/', (req, res) => {
    // joining path of directory
    const directoryPath = path.join(__dirname, '../data');
    readDirectory(directoryPath);
    res.send(JSON.stringify(transactionArray))
})
readDirectory takes the directory path and reads the filenames.
function readDirectory(directoryPath) {
    // passing directoryPath and callback function
    fs.readdir(directoryPath, function (err, files) {
        // handling error
        if (err) {
            return console.log('Unable to scan directory: ' + err);
        }
        // listing all files using map
        let fileSummary = files.map(file => {
            // get the filename
            let categoryName = ''
            if (file.includes('category1')) {
                categoryName = 'category1'
            } else if (file.includes('category2')) {
                categoryName = 'category2'
            } else {
                categoryName = 'Others'
            }
            // read the file
            const filePath = directoryPath + '/' + file
            fs.readFile(filePath, 'utf8', (err, fileContents) => {
                if (err) {
                    console.error(err)
                    return
                }
                try {
                    let data = JSON.parse(fileContents, categoryName)
                    processFile(data, categoryName);
                } catch (err) {
                    console.error(err)
                }
            })
        })
    });
}
Each file's parsed data is then handled by processFile.
function processFile(data, categoryName) {
    let paymentSource = ''
    if (categoryName == 'category1') {
        paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
    } else if (categoryName == 'category2') {
        paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
    } else {
        paymentSource = 'Others'
    }
    let transactionDetails = new Transaction(
        data.id,
        data.description,
        categoryName,
        data.made_on,
        data.amount,
        data.currency_code,
        paymentSource)
    transactionArray.push(transactionDetails)
    console.log(transactionArray);
}
The console log looks something like this:
[{Transaction1}] [{Transaction1},{Transaction2}] [{Transaction1},{Transaction2},{Transaction3}]
but the result in the UI is only [].
While debugging, I noticed that the reads do not happen synchronously, so I tried readFileSync, but it did not work. How can I make the two functions run in order so the route does not return an empty array?
Do some playing around to understand what the fs functions do when they take callbacks, and when they're synchronous. Starting from the code you have, we can make a few changes so that you don't have to use the synchronous functions from the file system library at all.
First of all, you need to wait for all the asynchronous tasks to complete before returning the response.
router.get('/', async (req, res) => {
  // joining path of directory
  const directoryPath = path.join(__dirname, '../data')
  readDirectory(directoryPath).then(() => {
    res.send(JSON.stringify(transactionArray))
  }).catch(err => {
    res.status(500).json(err)
  })
})
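(A side note before the next step: transactionArray is module-scoped, so it keeps growing across requests. A per-request reset, sketched below, avoids serving earlier requests' rows again:)
// Sketch: reset the module-level array at the start of every request.
router.get('/', (req, res) => {
  transactionArray = []
  const directoryPath = path.join(__dirname, '../data')
  readDirectory(directoryPath)
    .then(() => res.send(JSON.stringify(transactionArray)))
    .catch(err => res.status(500).json(err))
})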
Secondly, to keep the code as-is and teach you something about promises, let's wrap the first function in a promise.
function readDirectory (directoryPath) {
  return new Promise((resolve, reject) => {
    // passing directoryPath and callback function
    fs.readdir(directoryPath, function (err, files) {
      // handling error: reject, so the promise doesn't hang forever
      if (err) {
        console.log('Unable to scan directory: ' + err)
        return reject(err)
      }
      // wait for every file's promise using Promise.all
      Promise.all(
        files.map(file => {
          return new Promise((resolve, reject) => {
            // get the filename
            let categoryName = ''
            if (file.includes('category1')) {
              categoryName = 'category1'
            } else if (file.includes('category2')) {
              categoryName = 'category2'
            } else {
              categoryName = 'Others'
            }
            // read the file
            const filePath = directoryPath + '/' + file
            fs.readFile(filePath, 'utf8', (err, fileContents) => {
              if (err) {
                console.error(err)
                return reject(err)
              }
              try {
                const data = JSON.parse(fileContents)
                // resolve this file's promise once processFile is done
                processFile(data, categoryName).then(resolve)
              } catch (err) {
                console.error(err)
                reject(err)
              }
            })
          })
        })
      ).then(() => {
        resolve()
      }).catch(err => {
        reject(err)
      })
    })
  })
}
Please refer to the bible (MDN) for javascript about promises -> https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Promise
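As an aside, Node can generate these wrappers for you; a minimal sketch with the built-in util module:
// util.promisify turns any callback-style fs function into a promise-returning one.
const { promisify } = require('util')
const readdirAsync = promisify(fs.readdir)
const readFileAsync = promisify(fs.readFile)

// e.g. readdirAsync(directoryPath).then(files => ...) replaces the hand-rolled wrapper above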
And finally, wrap the processFile function in a promise:
function processFile (data, categoryName) {
  return new Promise((resolve, reject) => {
    let paymentSource = ''
    if (categoryName == 'category1') {
      paymentSource = categoryName + ': ' + categoryName + ' ' + data.currency_code
    } else if (categoryName == 'category2') {
      paymentSource = categoryName + ': ' + data.extra.payer + '-' + data.currency_code
    } else {
      paymentSource = 'Others'
    }
    const transactionDetails = new Transaction(
      data.id,
      data.description,
      categoryName,
      data.made_on,
      data.amount,
      data.currency_code,
      paymentSource)
    transactionArray.push(transactionDetails)
    console.log(transactionArray)
    resolve()
  })
}
What the heck am I doing? I'm just making your code execute its asynchronous tasks, but wait for them to be completed before moving on. Promises are a way to handle this. You could easily pull this off with the fs synchronous functions, but this way you learn about promises!
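For completeness, the synchronous spin on the same task would look roughly like this (a sketch; fine for a small, fixed set of files, but each call blocks the event loop):
// Sketch: the same flow with the blocking fs APIs, no promises needed.
function readDirectorySync (directoryPath) {
  for (const file of fs.readdirSync(directoryPath)) {
    const categoryName = file.includes('category1') ? 'category1'
      : file.includes('category2') ? 'category2'
      : 'Others'
    const fileContents = fs.readFileSync(path.join(directoryPath, file), 'utf8')
    processFile(JSON.parse(fileContents), categoryName) // pushes to transactionArray synchronously
  }
}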

Converting an Excel file to CSV and reading its contents in JSON format

When an Excel file is uploaded to my program, it needs to be converted to a CSV file so it can be read. The conversion itself works fine, and I am using async/await; however, whenever I try to read the file using the csvtojson package in Node, the file does not get read properly. If I use a CSV file directly, it works fine. The issue only arises when the conversion occurs.
product.js
const multer = require('multer');
const express = require('express');
const router = express.Router();
const csv = require('csvtojson');
const fs = require('fs');
const xlsx = require('node-xlsx');

const upload = multer({ dest: 'uploads/' }); // multer instance was missing from the snippet; destination assumed

router.post('/upload', upload.single('singleFile'), async (req, res) => {
    let csvFilePath = req.file.path;
    let fileType = req.file.mimetype;

    const convertToCSV = async _ => {
        console.log("2");
        if (fileType === 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' ||
            fileType === 'application/vnd.ms-excel') {
            let obj = xlsx.parse(csvFilePath);
            let rows = [];
            let writeStr = "";
            for (let i = 0; i < obj.length; i++) {
                let sheet = obj[i];
                for (let j = 0; j < sheet['data'].length; j++) {
                    rows.push(sheet['data'][j]);
                }
            }
            // creates the csv string to write it to a file
            for (let i = 0; i < rows.length; i++) {
                writeStr += rows[i].join(",") + "\n";
            }
            console.log("3");
            fs.writeFile("csv/out.csv", writeStr, function (err) {
                if (err) {
                    return res.status(400).send({'error': err});
                }
                console.log("4");
                console.log("out.csv was saved in the current directory!");
            });
        }
    }

    console.log("1");
    await convertToCSV().then(async _ => {
        console.log("5");
        const jsonArray = await csv({flatKeys: true})
            .fromFile(csvFilePath)
            .then(async (jsonObj) => {
                console.log("6");
                console.log(jsonObj[0]);
                ...
                // a few more functions
            }).catch(err => {
                return res.status(400).send(err);
            });
    });
});
My console log looks like this
1
2
3
5
4
out.csv was saved in the current directory!
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
Whereas if a new CSV file is uploaded, or an existing CSV is used, then the output of console.log(jsonObj[0]) is:
{
'Column1': 'Column 1 Data',
field2: 'field2 Data',
field3: 'field 3 Data',
Categories: 'categories',
....
}
I added await before fs.writeFile; however, the same issue arises. Two files get saved under the csv/ directory:
c33129f3bdef482657992dbf452d2c1b
out.csv
It seems the contents of the former file (the raw upload) are being read, since the outputs are very similar, and out.csv is not read.
Update
I wrapped a promise around fs.writeFile, and the console output is ordered now; however, the data that gets read is still the same:
const convertToCSV = async _ => {
    return new Promise(((resolve, reject) => {
        console.log("2");
        ....
        console.log("3");
        fs.writeFile("csv/out.csv", writeStr, function (err) {
            if (err) {
                return res.status(400).send({'error': err});
            }
            console.log("4");
            console.log("out.csv was saved in the current directory!");
            resolve();
        });
    }));
}
Console Log
1
2
3
4
out.csv was saved in the current directory!
5
6
{
'PK\u0003\u0004\u0014\u0000\u0006\u0000\b\u0000\u0000\u0000!\u0000b�h^\u0001\u0000\u0000�\u0004\u0000\u0000\u0013\u0000\b\u0002[Content_Types].xml �\u0004\u0002(�\u0000\u0002\u0000\u0000\
u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u0000\u00
You don't wait for fs.writeFile("csv/out.csv", ...), and that's why you get 5 before 4 in the console.
You should wrap your function body in a Promise:
const convertToCSV = async _ => {
    return new Promise((resolve, reject) => {
        console.log("2");
        ...
        console.log("3");
        fs.writeFile("csv/out.csv", writeStr, function (err) {
            if (err) {
                // I'd prefer to call "reject" here and add try/catch outside for sending 400
                return resolve(res.status(400).send({'error': err}));
            }
            console.log("4");
            console.log("out.csv was saved in the current directory!");
            resolve();
        });
    })
}
Also, you read csvFilePath, which contains the filename of the uploaded Excel file, not the CSV that was stored under the csv/out.csv name.
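In other words, once the promise-wrapped convertToCSV resolves, point csvtojson at the file that was actually written (a sketch; "csv/out.csv" matches the writeFile call above):
console.log("1");
await convertToCSV();                        // now actually waits for out.csv to be written
console.log("5");
const jsonArray = await csv({ flatKeys: true })
    .fromFile('csv/out.csv');                // read the converted CSV, not req.file.path
console.log("6");
console.log(jsonArray[0]);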

Export CSV without crash from node js

I need to export a CSV for a large amount of data, around 100k rows. I'm using json2csv, but sometimes it takes a long time and crashes. I'm using Node.js streams, but with no luck. I've been googling, but I have no idea how to fix it. Can anyone please guide me on how to fix this?
Node JS
var formatedData = {......} //object with data
let filename = 'test' + '.csv';
let pathName = await writeToCSV(filename, formatedData, fields);
let readStream = fs.createReadStream(pathName);
res.setHeader('Content-disposition', `attachment; filename=${filename}`);
res.set('Content-Type', 'text/csv');
let downloadStream = readStream.pipe(res);
fields = null;
formatedData = null;
downloadStream.on('finish', function() {
    fs.unlink(pathName, function() {});
    downloadStream = null;
    readStream = null;
});
writeToCSV
function writeToCSV(filename, data, fields, option) {
    return new Promise((resolve, reject) => {
        if (typeof data !== 'object') {
            return reject(new Error('Data is not an object'));
        }
        let options = {
            fields
        };
        if (typeof option === 'object') {
            for (let key in option) {
                options[key] = option[key];
            }
        }
        let tmpPath = path.join(__dirname, '..', tmp);
        let pathFile = tmpPath + filename;
        return Promise.all([Promise.resolve(json2csv(data, options).split('\n')), checkTMPExist(tmpPath)]).then(data => {
            let csvFormat = data[0];
            let writeStream = fs.createWriteStream(pathFile);
            csvFormat.forEach((lines, index) => {
                if (index === csvFormat.length - 1) {
                    writeStream.end(lines + '\n');
                } else {
                    writeStream.write(lines + '\n');
                }
            });
            writeStream.on('finish', function() {
                this.end();
                return resolve(pathFile);
            });
            writeStream.on('error', function(err) {
                this.end();
                fs.unlink(pathFile, () => {});
                return reject(err);
            });
        }).catch(err => {
            fs.unlink(pathFile, () => {});
            return reject(err);
        });
    });
}
Front end Ajax call
function export_csv(url, fileName) {
    $.ajax({
        url: url,
        type: "GET",
        success: function (result) {
            var encodedUri = 'data:application/csv;charset=utf-8,' + encodeURIComponent(result);
            var link = document.createElement("a");
            link.setAttribute("href", encodedUri);
            link.setAttribute("download", fileName);
            document.body.appendChild(link);
            link.click();
        },
        error: function (xhr) {
            console.log("Export csv have some issue:" + JSON.stringify(xhr));
        }
    });
}
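One way to keep memory flat on the server (a sketch, not the original code: it assumes json2csv v4+'s Parser class and a paginated getRows() data source you would supply) is to write the CSV to the response in batches instead of serializing one giant string:
// Sketch: stream the CSV to the response in batches with backpressure,
// instead of building the whole 100k-row string in memory.
const { Parser } = require('json2csv');

async function streamCsv(res, fields, getRows /* hypothetical: async, returns [] when done */) {
    res.setHeader('Content-Disposition', 'attachment; filename=test.csv');
    res.setHeader('Content-Type', 'text/csv');
    let first = true;
    let batch;
    while ((batch = await getRows()).length > 0) {
        const parser = new Parser({ fields, header: first }); // header only on the first batch
        first = false;
        const ok = res.write(parser.parse(batch) + '\n');
        if (!ok) {
            // the socket buffer is full: wait for it to drain before writing more
            await new Promise((resolve) => res.once('drain', resolve));
        }
    }
    res.end();
}
On the front end, letting the browser hit the endpoint directly (window.location = url, or a plain anchor with a download attribute) would also avoid buffering the entire body inside the $.ajax success handler.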

How to convert all djvu files to pdf

Here is the answer: just use Node.js and ddjvu from the DJView lib.
Imports:
const fs = require('fs');
const os = require('os');
const {spawn} = require('child_process');
const path = require('path');

const maxProcess = os.cpus().length - 1; // process count: one less than the CPU count, for system needs
let nowPlayed = 0;
A method to convert a file and delete the original once converted:
function chpoc(args) {
    console.log(args[1] + " start converting");
    spawn(`ddjvu`, ["-format=pdf", args[0], args[1] + ".pdf"]).on('close', (data) => {
        console.log(args[1] + ".pdf converted");
        fs.unlink(args[0], (err) => {
            if (err) throw err;
            console.log(args[0] + ' successfully deleted!');
            nowPlayed--;
        })
    });
}
A queue to cap how many conversions run at one time:
let queue = [];

function startQueue() {
    if (nowPlayed < maxProcess && queue.length) {
        nowPlayed++;
        queue.pop()();
    }
}

setInterval(startQueue, 500)
Fill the queue and start it:
function workWithFile(filepath) {
    const args = filepath.match(/(.*)\.djvu/)
    if (args && args.length) {
        queue.push(() => {
            chpoc(args);
        });
    }
}
Show errors:
const eachCallback = function (err) {
    err && console.error(err);
}
Walk the catalog tree and find the djvu files:
let filePaths = [];

function getFiles(dirPath, callback) {
    fs.readdir(dirPath, function (err, files) {
        if (err) return callback(err);
        files.forEach((fileName) => {
            setTimeout(() => {
                let filePath = path.join(dirPath, fileName);
                if (filePath) {
                    fs.stat(filePath, function (err, stat) {
                        if (err) return eachCallback(err);
                        if (stat.isDirectory()) {
                            getFiles(filePath, callback);
                        } else if (stat.isFile() && /\.djvu$/.test(filePath)) {
                            filePaths.push(filePath);
                            callback(filePath)
                        }
                    })
                }
            });
        });
    });
}
Initialize from the starting directory:
getFiles(__dirname, function (file) {
    workWithFile(file);
});
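The 500 ms polling timer works, but the same throttle can be event-driven (a sketch under the same assumptions as the code above; runQueue is a hypothetical replacement for startQueue):
// Sketch: kick the queue whenever a job is added and whenever one finishes,
// instead of polling with setInterval.
function runQueue() {
    while (nowPlayed < maxProcess && queue.length) {
        nowPlayed++;
        queue.pop()();
    }
}
// Call runQueue() after each queue.push(...) in workWithFile, and again
// right after nowPlayed-- in chpoc's unlink callback; then drop the setInterval.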
