I have created this "simple pattern" that combines a Promise and an EventEmitter (with Node.js).
But I'm wondering: is there a better way to achieve the same goal?
const { EventEmitter } = require('events');
const fs = require('fs');
// Kick off an async directory read and hand back both halves of the result:
// a promise for the final summary string, plus an emitter that fires one
// 'update-event' per directory entry before the promise settles.
function doSomething(parameters) {
  const emitter = new EventEmitter();
  const promise = new Promise((resolve, reject) => {
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, entries) => {
      if (err) return reject(err);
      for (const entry of entries) {
        emitter.emit('update-event', entry);
      }
      resolve(`I'm done: ${parameters.param} world`);
    });
  });
  return { promise, emitter };
}
// Consume both halves: subscribe to per-file updates, then await the summary.
const { promise, emitter } = doSomething({ param: 'hello', directory: './' });
emitter.on('update-event', (data) => console.log(`Update ${data}`));
promise.then(console.log).catch(console.error);
I was thinking like:
doSomething(...).on(...).then(...)
but I can't figure out how to do that.
Node.js has a built-in function for this: the require('events').once function! Here is the PR.
It was released in Node [v11.13.0](https://nodejs.org/en/blog/release/v11.13.0/).
An example usage (from docs):
const { once, EventEmitter } = require('events');
// Demonstrates events.once(): the first await resolves with the array of
// emitted arguments; the second rejects because an 'error' event fires
// while we are waiting for 'myevent'.
async function run() {
  const emitter = new EventEmitter();
  const fireEvent = () => emitter.emit('myevent', 42);
  process.nextTick(fireEvent);
  const [value] = await once(emitter, 'myevent');
  console.log(value); // 42
  const err = new Error('kaboom');
  process.nextTick(() => emitter.emit('error', err));
  try {
    await once(emitter, 'myevent');
  } catch (err) {
    console.log('error happened', err);
  }
}
run();
Personally I'm not sure how the accepted answer relates to the OP's question; anyway, I think I have found a rather simple (but maybe not very nice) way of accomplishing the specific doSomething(...).on(...).then(...) thing the OP asked for. Taking the OP's example code, we can do something like the following:
const { EventEmitter } = require('events');
const fs = require('fs');
// Returns a Promise augmented with .on()/.emit() so callers can chain
// doSomething(...).on(...).then(...). The promise resolves with a summary
// string after every directory entry has been announced via 'update-event'.
function doSomething(parameters) {
  let settlePromise;
  let failPromise;
  const emitter = new EventEmitter();
  const promise = new Promise((resolve, reject) => {
    settlePromise = resolve;
    failPromise = reject;
  });
  // Delegate to the real emitter instead of borrowing its methods unbound:
  // `promise.on = emitter.on` only works by accident of EventEmitter's
  // internals tolerating a foreign `this`. Returning `promise` from .on()
  // is what keeps the .on(...).then(...) chain working.
  promise.on = (eventName, listener) => {
    emitter.on(eventName, listener);
    return promise;
  };
  promise.emit = (eventName, ...args) => emitter.emit(eventName, ...args);
  // DO DIRTY JOB
  fs.readdir(parameters.directory, (err, files) => {
    if (err) {
      failPromise(err);
      return;
    }
    files.forEach(file => promise.emit('update-event', file));
    settlePromise(`I'm done: ${parameters.param} world`);
  });
  return promise;
}
// The whole point of this version: event subscription and promise handling
// chain off one object.
doSomething({ param: 'hello', directory: './' })
  .on('update-event', (data) => console.log(`Update ${data}`))
  .then(console.log)
  .catch(console.error);
It works for my limited cases so far, and both the event and the promise can be chained without issues as far as I know. There might be problems for more complicated use-cases that I have not encountered yet, but it does serve the purpose of chaining doSomething(...).on(...).then(...) like the OP asked.
No, you shouldn't combine an event emitter and a promise in one object. Returning them separately, just like you did, is fine.
Of course, in your particular example, there's not really a reason to use an event emitter at all. It just fires when the promise fulfills anyway. Much simpler:
const fs = require('fs');
// Promise-only variant: the directory listing itself is the resolution
// value, with the caller's parameters merged onto the returned array.
function doSomething(parameters) {
  return new Promise((resolve, reject) => {
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, files) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(Object.assign(files, parameters));
    });
  });
}
// Log each entry, then hand the summary string down the chain.
doSomething({ param: 'hello', directory: './' })
  .then((files) => {
    files.forEach((data) => console.log(`Update ${data}`));
    return `I'm done: ${files.param} world`;
  })
  .then(console.log, console.error);
I would suggest this:
import EventPromised from "event-promised";
// EventPromised exposes `emit` alongside resolve/reject, so events and the
// final value flow through one chainable object.
function doSomething(parameters) {
  return new EventPromised((resolve, reject, emit) => {
    fs.readdir(parameters.directory, (err, files) => {
      if (err) return reject(err);
      for (const file of files) {
        emit('update-event', file);
      }
      resolve(`I'm done: ${parameters.param} world`);
    });
  });
}
doSomething({ param: 'hello', directory: './' })
  .on('update-event', (data) => console.log(`Update ${data}`))
  .then(console.log)
  .catch(console.error);
Related
I have a lambda function with the structure below,
It used to work in older versions of nodejs but it doesn't work with the newer versions.
I know my code structure is quite messy and wrong, but I can't get my head around it. I'm trying to use Promise.all, but I'm obviously doing something wrong because it's not getting executed at all.
By the way, I'm not getting any errors. The Promise.all callback never gets executed.
let AWS = require('aws-sdk');
// NOTE(review): Lambda handler as posted — several names used below (`sql`
// at the first getOrder call, `followingsIDs`, `_`, `sns`, `params`,
// `response`) are not defined anywhere in this snippet, so this code cannot
// run as shown.
exports.handler = async(event, context, callback) => {
let result = {};
try {
// NOTE(review): `sql` has not been declared at this point — this likely
// throws a ReferenceError, which is caught below and passed to callback.
result = await getOrder(sql, 0);
result.map(
(dataField) => {
});
}
catch (error) {
console.log(error);
callback(error);
// NOTE(review): no `return` after the error callback, so the rest of the
// handler still runs even after reporting failure.
}
var today_result = [];
const groupKey = i => i.user_id + '_' + i.when;
const counts = _.countBy(followingsIDs, groupKey);
const isMulti = i => counts[groupKey(i)] > 1;
// NOTE(review): both branches of this ternary return groupKey(i) — the
// condition has no effect.
const multiPropkey = i => ({ multiplekey: isMulti(i) ? groupKey(i) : groupKey(i) });
const multiProp = i => ({ multiple: isMulti(i) ? counts[groupKey(i)] : 1 });
const updated = _.map(followingsIDs, i => _.extend(i, multiProp(i), multiPropkey(i)));
const uniqResult = _.uniq(updated, function(d) { return d.multiplekey });
// Doesn’t execute from here —>
// NOTE(review): if `uniqResult` ends up empty, this map produces no promises
// and Promise.all resolves immediately without ever entering the callback —
// which would explain the "never gets executed" symptom.
await Promise.all(uniqResult.map(async(dataField) => {
console.log("test_");
dosomething()
if (true) {
let sql = `INSERT INTO ….`
result = await getOrder(sql, 0);
try {
const data = await sns.publish(params).promise();
}
catch (e) {
console.log(e.stack);
response.result = 'Error';
}
}
}));
// Till here <----
callback(null, uniqResult);
};
// Run `sql` with `params` on a pooled connection and resolve with the query
// results. The connection is always released, and every failure path rejects
// the promise. (The original `throw err` inside the callback could not be
// caught by the caller's try/catch and would crash the process; it also kept
// running past reject(err) into resolve(results).)
let getOrder = async (sql, params) => {
  return new Promise((resolve, reject) => {
    pool.getConnection((err, connection) => {
      if (err) {
        // Reject instead of throwing: a throw inside this callback escapes
        // the promise entirely.
        reject(err);
        return;
      }
      connection.query(sql, params, (queryErr, results) => {
        // Release on both success and failure.
        connection.release();
        if (queryErr) {
          reject(queryErr);
          return;
        }
        resolve(results);
      });
    });
  });
};
What are you awaiting? The uniqResult is just declared as an empty array. Immediately after that, you pass it to Promise.all. You need to fill it with Promises and then pass it to Promise.all.
I'm totally a newbie in Node.js. I'm trying to read a JSON file in a service and then use it in another service.
It tells me that my service is undefined. What am I doing wrong?
Thanks in advance!
JsonService
'use strict';
const fs = require('fs');
// NOTE(review): getJobs has no `return` statement of its own, so it always
// returns undefined — the Promise below is created inside the readFile
// callback and returned to fs, not to the caller. This is why
// JsonService.getJobs().then(...) fails.
exports.getJobs = ()=> {
fs.readFile('./common/data/jobs.json', (err, data) => {
// NOTE(review): throwing inside an async callback cannot be caught by the
// caller; it crashes the process instead.
if (err) throw err;
if (data) {
return new Promise(function (resolve, reject) {
resolve(data);
});
}
});
}
And JobsService
const JsonService = require('../../common/services/json.service');
// NOTE(review): list() never returns anything, so callers cannot observe the
// result. The inner `new Promise` is created and immediately discarded;
// returning the JsonService.getJobs() chain itself would be enough.
exports.list = () => {
let jobs;
JsonService.getJobs().then((data)=>{
jobs = data;
return new Promise((resolve, reject) => {
if(jobs){
resolve(jobs);
}else{
reject(jobs)
}
});
});
};
If you just need to read the json file, you could simply do
const jobs = require("./common/data/jobs.json");
Hope this helps.
The reason your code is not working is that you are trying to use it as a promise, but getJobs doesn't return a promise. You need to wrap it in a promise and then return it, like this:
const fs = require("fs");
exports.getJobs = () => {
return new Promise((resolve, reject) => {
fs.readFile("./common/data/jobs.json", (err, data) => {
if (err) {
reject(err);
}
if (data) {
resolve(data);
}
});
});
};
and use it like this
const JsonService = require("../../common/services/json.service");
exports.list = () => {
let jobs;
JsonService.getJobs().then(data => {
console.log(data);
});
};
No need to wrap it in a promise here.
But in your case, simply requiring the file should work, as I mentioned above.
So I encountered a problem while doing my project. The problem is that when I try to write my data to a CSV file, it only writes half of the data, sometimes even less than half. I don't know what the problem is because there is no error shown in the terminal.
Below is my code
// NOTE(review): this mixes async/await with .then()/.catch() on the same
// Promise.all chain, and collects results by pushing into `temp` from inside
// the mapped async callbacks. Pushes happen in *completion* order, not in
// listOfKeys order, so the byte order of the concatenated buffer is
// non-deterministic — a plausible cause of the "half of the data" symptom if
// the output is later parsed row-by-row. Using the array that Promise.all
// resolves with (which preserves input order) avoids this.
async function getFile(req, res, next) {
try {
let URI;
const listOfKeys = await listAllKeys();
let temp = []
await Promise.all(listOfKeys.map(async function (data) {
let response = await writeFile(data.Key);
temp.push(response)
}))
.then(async _ => {
// NOTE(review): if any S3 read rejected, the .catch below only logs, the
// file is silently not written, yet the 200 JSON response is still sent.
fs.writeFileSync(FILE_PATH, Buffer.concat(temp));
})
.catch(err => {
console.log(err)
})
return res.json({ message: 'halo' });
} catch (err) {
console.log('hmm.... error', err);
return next(new APIError(err, httpStatus.INTERNAL_SERVER_ERROR, true));
};
};
And this is the writeFile function
// Fetch one object from S3 and resolve with its raw body.
function writeFile(key) {
  return new Promise((resolve, reject) => {
    const request = { Bucket: process.env.AWS_BUCKET, Key: key };
    s3.getObject(request, (err, data) => {
      if (err) {
        reject(err);
      } else {
        resolve(data.Body);
      }
    });
  });
}
If possible, I would like to know the details of my problem and how to fix it. Thanks.
It looks to me like you can do it like this (function names have been modified to make sense to me):
const fsp = require('fs').promises;
// Read every S3 object named by listAllKeys(), concatenate the bodies in key
// order, and write the result to FILE_PATH before responding.
async function getDataAndWriteFile(req, res, next) {
  try {
    const listOfKeys = await listAllKeys();
    // Promise.all preserves input order, so the concatenated buffer is
    // deterministic (unlike pushing from inside the async callbacks).
    const responses = await Promise.all(
      listOfKeys.map((data) => getData(data.Key))
    );
    // Fixed: the original line was missing the closing parenthesis on this
    // call (a syntax error).
    await fsp.writeFile(FILE_PATH, Buffer.concat(responses));
    res.json({ message: 'halo' });
  } catch (err) {
    console.log('hmm.... error', err);
    next(new APIError(err, httpStatus.INTERNAL_SERVER_ERROR, true));
  }
}
// Resolve with the body of the S3 object stored under `key`.
function getData(key) {
  return new Promise((resolve, reject) => {
    s3.getObject({ Bucket: process.env.AWS_BUCKET, Key: key }, (err, data) => {
      if (err) {
        reject(err);
        return;
      }
      resolve(data.Body);
    });
  });
}
Summary of changes:
Change function names to better describe what they do
Use let responses = await Promise.all() to get the data from the promise array.
Use the promise interface in the fs module with await fsp.writeFile() to write the data out to your file.
Use try/catch to catch all the promise rejections in one place
Possible Open Issues:
Writing this Buffer.concat(responses) to disk seems kind of odd. Is that really what you want in this file?
I am trying to pass information from a database lookup through socket.io and I am having more troubles than I thought I would.
Here is the socket.io emit:
// NOTE(review): snippet as posted is truncated — the io.sockets.on(...) call
// and the exported arrow function are never closed.
module.exports = (app, io) => {
io.sockets.on('connect', async socket => {
if (!socket.handshake.session.passport) {
return socket.emit('redirect');
}
//Online
console.log(Object.keys(app.online).length);
//Load up
// NOTE(review): loadup() is invoked twice. The second call's result is
// handed to socket.emit *unresolved* — the client receives a pending
// Promise, not the data. Awaiting socket.emit also appears pointless here
// (emit does not return a Promise — confirm for this socket.io version).
await loadup(app, socket).then(data => {
console.log(data);
});
await socket.emit('load up', loadup(app, socket));
}
(I tried multiple ways to get the desired data to show)
Here is the actual method:
const statHandler = require('./loadhandling/statload');
const User = require('../models/User');
// NOTE(review): this async function always resolves with undefined — the
// `return userdata` below is the return value of the findById *callback*,
// which the query machinery discards; nothing is returned from the exported
// function itself.
module.exports = async (app, socket) => {
let isOnline = app.online[socket.handshake.session.passport.user];
if (!isOnline) return;
// Start the loadup process
User.findById(socket.handshake.session.passport.user, (err, user) => {
// NOTE(review): on error this only logs and then falls through to
// `user.meta`, which will throw if `user` is undefined.
if (err) console.log(err);
let userdata = user.meta;
statHandler(userdata);
return userdata;
});
};
I've always seemed to struggle in these situations for some reason.
I managed to fumble my through this:
Socket.io
// NOTE(review): snippet is truncated as posted (the io.sockets.on call and
// the exported function are never closed).
module.exports = (app, io) => {
io.sockets.on('connect', async socket => {
if (!socket.handshake.session.passport) {
return socket.emit('redirect');
}
//Online
console.log(Object.keys(app.online).length);
//Load up
// Resolve the user data first, then emit the concrete value — this is the
// fix over the earlier version, which emitted a pending Promise.
const load = await loadup(app, socket);
// NOTE(review): the `await` on emit is unnecessary; emit does not return a
// Promise — confirm against the socket.io version in use.
await socket.emit('load up', load);
}
Method:
const statHandler = require('./loadhandling/statload');
const User = require('../models/User');
module.exports = async (app, socket) => {
let isOnline = app.online[socket.handshake.session.passport.user];
if (!isOnline) return;
// Start the loadup process
const thisUser = await User.findById(
socket.handshake.session.passport.user,
(err, user) => {
if (err) console.log(err);
}
).then(user => {
let userdata = user.meta;
statHandler(userdata);
return new Promise((res, rej) => {
res({ userdata });
});
});
return await thisUser;
};
I'm sure there's a more efficient way, but I don't feel like finding one. Hope this helps someone in the future.
I have this code that serves every markdown file in the './markdown' folder. At '/api/markdown/filename'.
var apiRouter = express.Router();
// NOTE(review): missing `var`/`const` — this creates an implicit global.
markdownFolder = './markdown/';
// Reads every file in the folder on each request and responds with the one
// whose basename matches the :markdown_file_noext parameter.
apiRouter.get('/:markdown_file_noext', function(req, res) {
fs.readdir(markdownFolder, function(err, markdown) {
// NOTE(review): `throw` inside these async callbacks cannot be caught by
// Express; it will crash the process instead of producing a 500.
if (err) throw err;
markdown.forEach(function(file) {
fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
if (err) throw err;
// NOTE(review): implicit global again; also, if no file matches, the
// request is never answered and the client hangs.
fileNoExtension = file.slice(0, file.indexOf('.'));
if (req.params.markdown_file_noext == fileNoExtension) {
res.json({
'title': fileNoExtension,
'markdown': marked(file_content)
});
};
});
});
});
});
But I end up having a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as promise library:
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
// Promisify the callback-style fs APIs once, up front.
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);
readdir(markdownFolder)
  .then((markdown) => {
    const promises = markdown.map((file) => readFile(markdownFolder + file, 'utf8'));
    return Q.all(promises);
  })
  .then((files) => {
    // Do your magic.
  })
  .catch((error) => {
    // Do something with error.
  });
You have different options.
Use named functions instead of anonymous functions. That would make the code a little more readable, but you would still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module.
For a more advanced option, you can use generators and Promises to make your code look more like synchronous code. Take a look at co or bluebird.coroutine.
With Promises you could do like this:
const path = require('path');
var apiRouter = express.Router();
markdownFolder = './markdown/';
// GET /:markdown_file_noext — read every markdown file in the folder and
// respond with the rendered content of those whose basename matches the
// request parameter.
apiRouter.get('/:markdown_file_noext', function(req, res) {
  readdir(markdownFolder)
    .then((files) => {
      const tasks = files.map((file) => {
        const filePath = path.resolve(markdownFolder, file);
        // Keep the filename with its content: the next .then needs both.
        // (Fixed: the original referenced `file` in the second .then, where
        // it was out of scope — a ReferenceError at request time.)
        return readFile(filePath).then((content) => ({ file, content }));
      });
      return Promise.all(tasks); // Read all files
    })
    .then((fileContents) => {
      // Filter first so non-matching files do not leave undefined holes in
      // the response array.
      return fileContents
        .filter(({ file }) => {
          const fileNoExtension = file.slice(0, file.indexOf('.'));
          return req.params.markdown_file_noext == fileNoExtension;
        })
        .map(({ file, content }) => ({
          'title': file.slice(0, file.indexOf('.')),
          'markdown': marked(content)
        }));
    })
    .then((results) => {
      // It's better if you aggregate all results in one array and return it,
      // instead of calling res.json for each result
      res.json(results);
    })
    .catch((err) => {
      // All errors are catched here
      console.log(err);
    })
});
// Promise wrapper around fs.readdir: resolves with the list of entry names.
function readdir(folderPath) {
  return new Promise((resolve, reject) => {
    // Fixed: the original callback `(err, files) {` was missing the `=>`
    // arrow, which is a syntax error.
    fs.readdir(folderPath, (err, files) => {
      if (err) {
        return reject(err);
      }
      resolve(files);
    });
  });
}
// Promise wrapper around fs.readFile: resolves with the file's utf8 text.
function readFile(filePath) {
  return new Promise((resolve, reject) => {
    fs.readFile(filePath, 'utf8', (err, file_content) => {
      if (err) {
        reject(err);
      } else {
        resolve(file_content);
      }
    });
  });
}