Avoiding callback hell - node.js

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename':
var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
    fs.readdir(markdownFolder, function(err, markdown) {
        if (err) throw err;
        markdown.forEach(function(file) {
            fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
                if (err) throw err;
                fileNoExtension = file.slice(0, file.indexOf('.'));
                if (req.params.markdown_file_noext == fileNoExtension) {
                    res.json({
                        'title': fileNoExtension,
                        'markdown': marked(file_content)
                    });
                }
            });
        });
    });
});
But I end up with a ton of nested callbacks due to the nature of the 'fs' methods. How do I avoid this?

Using Q as the promise library:
const Q = require('q');
const fs = require('fs');

const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
    const promises = [];
    markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
    return Q.all(promises);
}).then(files => {
    // Do your magic.
}).catch(error => {
    // Do something with error.
});
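If you are on Node 8 or newer, util.promisify gives you the same wrappers without pulling in Q (a minimal sketch; the rest of the chain stays exactly as above):

const { promisify } = require('util');
const fs = require('fs');

// Same idea as Q.nfbind: turn the callback-style fs methods into promise-returning ones
const readdir = promisify(fs.readdir);
const readFile = promisify(fs.readFile);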

You have a few different options.
Use named functions instead of anonymous functions. It makes the code a little more readable, but you are still using callbacks.
Use promises, but you will need a library such as Bluebird to wrap (promisify) the fs module.
For a more advanced option, you can use generators together with promises to make your code read almost like synchronous code. Take a look at co or bluebird.coroutine; a sketch with co follows.
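As a rough sketch of that generator option (assuming the co package plus the promise-returning readdir/readFile helpers defined at the end of this answer), the handler could look like this:

const co = require('co');

apiRouter.get('/:markdown_file_noext', function(req, res) {
    co(function* () {
        // Each `yield` suspends the generator until the promise settles,
        // so the code reads top to bottom like synchronous code.
        const files = yield readdir(markdownFolder);
        const match = files.find(
            (file) => file.slice(0, file.indexOf('.')) === req.params.markdown_file_noext
        );
        if (!match) {
            return res.status(404).end();
        }
        const content = yield readFile(markdownFolder + match);
        res.json({ title: req.params.markdown_file_noext, markdown: marked(content) });
    }).catch((err) => res.status(500).end());
});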

With promises you could do it like this:
const fs = require('fs');
const path = require('path');

const apiRouter = express.Router();
const markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
    readdir(markdownFolder)
        .then((files) => {
            const tasks = files.map((file) => {
                const filePath = path.resolve(markdownFolder, file);
                // Keep the file name together with its content so we can match it later
                return readFile(filePath).then((content) => ({ file, content }));
            });
            return Promise.all(tasks); // Read all files
        })
        .then((entries) => {
            return entries
                .map(({ file, content }) => {
                    const fileNoExtension = file.slice(0, file.indexOf('.'));
                    if (req.params.markdown_file_noext === fileNoExtension) {
                        return {
                            'title': fileNoExtension,
                            'markdown': marked(content)
                        };
                    }
                })
                .filter(Boolean); // Drop the files that did not match
        })
        .then((results) => {
            // It's better to aggregate all results in one array and return it,
            // instead of calling res.json for each result
            res.json(results);
        })
        .catch((err) => {
            // All errors are caught here
            console.log(err);
        });
});
function readdir(folderPath) {
    return new Promise((resolve, reject) => {
        fs.readdir(folderPath, (err, files) => {
            if (err) {
                return reject(err);
            }
            resolve(files);
        });
    });
}

function readFile(filePath) {
    return new Promise((resolve, reject) => {
        fs.readFile(filePath, 'utf8', (err, file_content) => {
            if (err) {
                return reject(err);
            }
            resolve(file_content);
        });
    });
}
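On Node 10 and newer the two wrappers above are not strictly needed, because the built-in promise API already covers them (a small sketch, not part of the original answer):

const fsp = require('fs').promises;

// fs.promises exposes readdir/readFile that already return promises
const readdir = (folderPath) => fsp.readdir(folderPath);
const readFile = (filePath) => fsp.readFile(filePath, 'utf8');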

Related

In Node.js, why is my data not getting passed back after an asynchronous file read using a Promise

I know for sure that my pullData module is getting the data back from the file read, but the function calling it, even though it has an await, is not getting the data.
This is the module (./initialise.js) that reads the data:
const fs = require('fs');
const getData = () => {
    return new Promise((resolve, reject) => {
        fs.readFile('./Sybernika.txt',
            { encoding: 'utf8', flag: 'r' },
            function (err, data) {
                if (err)
                    reject(err);
                else
                    resolve(data);
            });
    });
};
module.exports = {getData};
And this is where it gets called (app.js):
const init = require('./initialise');
const pullData = async () => {
    init.getData().then((data) => {
        return data;
    }).catch((err) => {
        console.log(err);
    });
};

const start = async () => {
    let data = await pullData();
    console.log(data);
}
start();
Putting console.log(data) just before the return data in the .then callback shows the data, so I know the file is being read OK. However, that final console.log shows my data variable as undefined.
Any suggestions?
It's either
const pullData = async () => {
    return init.getData().then((data) => {
        return data;
    }).catch((err) => {
        console.log(err);
    });
};

or

const pullData = async () =>
    init.getData().then((data) => {
        return data;
    }).catch((err) => {
        console.log(err);
    });
Both versions make sure a promise returned by then/catch is passed down to the caller.
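Equivalently, since pullData is already declared async, you can await inside it; a minimal sketch with the same error handling as above:

const pullData = async () => {
    try {
        return await init.getData();
    } catch (err) {
        // Same behaviour as the .catch above: log and resolve with undefined
        console.log(err);
    }
};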

My Node.js program is giving me a "TypeError: Cannot read property 'then' of undefined" when reading a JSON file

I'm trying to read from a JSON file within my program and I want to use a GET /list endpoint to read it through the browser or Postman, but I'm getting the above TypeError. Here is my code:
model.js:
const fs = require('fs');

function loadTeams() {
    return new Promise((resolve, reject) => {
        fs.readFile('./json/prov-nodes.json', (err, data) => {
            if (err) reject(err);
            const teams = JSON.parse(data);
            console.log(teams);
            resolve(teams);
        });
    });
}
app.use(bodyParser.json());

app.get('/list', (req, res) => {
    let teams = [];
    loadTeams()
        .then(function(data) {
            teams = JSON.stringify(data);
            console.log(teams);
            res.send(teams); // intended to send to browser/postman response
            console.log('try...part ..read call');
        })
        .catch(error => console.log(error));
    res.send("My root page");
    console.log(teams);
});
The loadTeams function does not return a promise, and therefore you cannot call .then().
You can wrap the function in a promise like this:
function loadTeams() {
    return new Promise(function(resolve, reject) {
        fs.readFile('./json/prov-nodes.json', (err, data) => {
            if (err) return reject(err);
            try {
                const teams = JSON.parse(data);
                return resolve(teams);
            } catch (e) {
                reject(e);
            }
        });
    });
}
In order to use loadTeams as an async operation, you should turn it into a function that returns a Promise wrapping the callback result:
function loadTeams() {
    return new Promise((resolve, reject) => {
        fs.readFile('./json/prov-nodes.json', (err, data) => {
            if (err) return reject(err);
            const teams = JSON.parse(data);
            console.log(teams);
            resolve(teams);
        });
    });
}
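A sketch of consuming it in the /list route (the trailing res.send("My root page") from the question is dropped here, because only one response can be sent per request):

app.get('/list', (req, res) => {
    loadTeams()
        .then((teams) => {
            res.send(teams); // Express serialises the object to JSON
        })
        .catch((error) => {
            console.log(error);
            res.status(500).send('Could not load teams');
        });
});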

Node.js Multer with Imagemin for file compression

This is my code:
const imagemin = require('imagemin');
const imageminMozjpeg = require('imagemin-mozjpeg');
const multer = require('multer');

const upload = multer({ storage: storage });
var imageUpload = upload.single('file');

var storage = multer.diskStorage({
    destination: function(req, file, cb) {
        cb(null, './tempuploads/');
    },
    filename: function(req, file, cb) {
        cb(null, file.originalname);
    }
});
exports.filecompressor = async (req, res) => {
    imageUpload(req, res, function(err) {
        let filetoupload = req.file;
        let filename = filetoupload.originalname;
        if (err) {
            console.log(err);
        }
        let temppath = "./tempuploads/" + filename;
        imagemin([temppath], {
            destination: './cachedimages/',
            plugins: [imageminMozjpeg({ quality: 50 })]
        }).then(result => {
            console.log(result);
        });
    });
};
I want imageUpload(req, res, function(err){}) to return temppath so that I can call the imagemin() function outside of it and await it.
Can anybody please help me?
You can convert the imageUpload callback into a promise, for example:
exports.filecompressor = async (req, res) => {
    return new Promise((resolve, reject) => {
        imageUpload(req, res, function (err) {
            if (err) {
                console.log(err);
                return reject(err);
            }
            let filetoupload = req.file;
            let filename = filetoupload.originalname;
            let temppath = "./tempuploads/" + filename;
            resolve(temppath);
        });
    });
};
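If you then want the route handler itself to run the compression, you can pull that wrapper out under a name of your own (uploadToTemp below is only an illustrative name) and await both steps; a sketch:

const uploadToTemp = (req, res) =>
    new Promise((resolve, reject) => {
        imageUpload(req, res, function (err) {
            if (err) {
                return reject(err);
            }
            resolve('./tempuploads/' + req.file.originalname);
        });
    });

exports.filecompressor = async (req, res) => {
    try {
        const temppath = await uploadToTemp(req, res);
        const result = await imagemin([temppath], {
            destination: './cachedimages/',
            plugins: [imageminMozjpeg({ quality: 50 })]
        });
        console.log(result);
        res.send(result);
    } catch (err) {
        console.log(err);
        res.status(500).send('Upload or compression failed');
    }
};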

NodeJS service function is undefined

I'm totally a newbie in Node.js. I'm trying to read a JSON file in a service and then use it in another service.
It tells me that my service is undefined. What am I doing wrong?
Thanks in advance!
JsonService
'use strict';
const fs = require('fs');

exports.getJobs = () => {
    fs.readFile('./common/data/jobs.json', (err, data) => {
        if (err) throw err;
        if (data) {
            return new Promise(function (resolve, reject) {
                resolve(data);
            });
        }
    });
}
And JobsService
const JsonService = require('../../common/services/json.service');

exports.list = () => {
    let jobs;
    JsonService.getJobs().then((data) => {
        jobs = data;
        return new Promise((resolve, reject) => {
            if (jobs) {
                resolve(jobs);
            } else {
                reject(jobs);
            }
        });
    });
};
If you just need to read the json file, you could simply do
const jobs = require("./common/data/jobs.json");
Hope this helps.
The reason your code is not working is that you are trying to use getJobs as a promise, but it doesn't return one. You need to wrap the file read in a promise and return that, like this:
const fs = require("fs");

exports.getJobs = () => {
    return new Promise((resolve, reject) => {
        fs.readFile("./common/data/jobs.json", (err, data) => {
            if (err) {
                reject(err);
            }
            if (data) {
                resolve(data);
            }
        });
    });
};
and use it like this
const JsonService = require("../../common/services/json.service");

exports.list = () => {
    let jobs;
    JsonService.getJobs().then(data => {
        console.log(data);
    });
};
There is no need to wrap it in a promise here.
But in your case, simply requiring the file should work, as I mentioned above.
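If the callers of list actually need the jobs, list should hand the promise back (or be async); a minimal sketch, assuming the file contains the JSON you want to expose:

const JsonService = require("../../common/services/json.service");

exports.list = async () => {
    const data = await JsonService.getJobs();
    // readFile resolves with a Buffer here, so parse it before returning
    return JSON.parse(data);
};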

Combine Promise and EventEmitter

I have created this "simple pattern" that combines a Promise and an EventEmitter (with Node.js).
But I'm wondering: is there a better way to achieve this?
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
    const emitter = new EventEmitter();
    const promise = new Promise((resolve, reject) => {
        // DO DIRTY JOB
        fs.readdir(parameters.directory, (err, files) => {
            if (err) {
                reject(err);
                return;
            }
            files.forEach(file => emitter.emit('update-event', file));
            resolve(`I'm done: ${parameters.param} world`);
        });
    });
    return { promise, emitter };
}

const work = doSomething({ param: 'hello', directory: './' });
work.emitter.on('update-event', data => console.log(`Update ${data}`));
work.promise.then(console.log).catch(console.error);
I was thinking of something like:
doSomething(...).on(...).then(...)
but I can't figure out how to do that.
Node.js has a built-in function for this: the require('events').once function! (See the PR that introduced it.)
It was released with Node v11.13.0 (https://nodejs.org/en/blog/release/v11.13.0/).
An example usage (from the docs):
const { once, EventEmitter } = require('events');

async function run() {
    const ee = new EventEmitter();

    process.nextTick(() => {
        ee.emit('myevent', 42);
    });

    const [value] = await once(ee, 'myevent');
    console.log(value); // 42

    const err = new Error('kaboom');
    process.nextTick(() => {
        ee.emit('error', err);
    });

    try {
        await once(ee, 'myevent');
    } catch (err) {
        console.log('error happened', err);
    }
}

run();
Personally, I'm not sure how the accepted answer relates to the OP's question. Anyway, I think I have found a rather simple (but maybe not very nice) way of accomplishing the specific doSomething(...).on(...).then(...) chaining the OP asked for. Taking the OP's example code, we can do something like the following:
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
    var resolves;
    var rejects;
    const emitter = new EventEmitter();
    const promise = new Promise((resolve, reject) => {
        resolves = resolve;
        rejects = reject;
    });
    promise.on = emitter.on;
    promise.emit = emitter.emit;
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, files) => {
        if (err) {
            rejects(err);
            return;
        }
        files.forEach(file => promise.emit('update-event', file));
        resolves(`I'm done: ${parameters.param} world`);
    });
    return promise;
}

const work = doSomething({ param: 'hello', directory: './' });
work.on('update-event', data => console.log(`Update ${data}`))
    .then(console.log)
    .catch(console.error);
It works for my limited cases so far, and both the event and the promise can be chained without issues as far as I know. There might be problems with more complicated use cases that I have not encountered yet, but it serves the purpose of chaining doSomething(...).on(...).then(...) as the OP asked.
No, you shouldn't combine an event emitter and a promise in one object. Returning them separately, just like you did, is fine.
Of course, in your particular example, there's not really a reason to use an event emitter at all. It just fires when the promise fulfills anyway. Much simpler:
const fs = require('fs');

function doSomething(parameters) {
    return new Promise((resolve, reject) => {
        // DO DIRTY JOB
        fs.readdir(parameters.directory, (err, files) => {
            if (err) reject(err);
            else resolve(Object.assign(files, parameters));
        });
    });
}

doSomething({ param: 'hello', directory: './' }).then(files => {
    for (const data of files) {
        console.log(`Update ${data}`)
    }
    return `I'm done: ${files.param} world`;
}).then(console.log, console.error);
I would suggest this:
import fs from "fs";
import EventPromised from "event-promised";

function doSomething(parameters) {
    return new EventPromised((resolve, reject, emit) => {
        fs.readdir(parameters.directory, (err, files) => {
            if (err) {
                reject(err);
                return;
            }
            files.forEach(file => emit('update-event', file));
            resolve(`I'm done: ${parameters.param} world`);
        });
    });
}

doSomething({ param: 'hello', directory: './' })
    .on('update-event', data => console.log(`Update ${data}`))
    .then(console.log)
    .catch(console.error);
