I am experimenting with async/await code to read files.
Here's my code:
var fs = require('fs');

function readFile(fileName) {
  return new Promise(resolve => {
    //console.log(test);
    fs.readFile(fileName, 'utf8', function (err, data) {
      if (err) throw err;
      console.log(fileName);
      console.log(data);
    });
    resolve();
  });
}

async function run() {
  await readFile('file1.txt');
  await readFile('file2.txt');
  readFile('file3.txt');
}

run();
But the result is still random: file3 is sometimes read before file2. What am I doing wrong?
There are many ways to achieve this. Most of them are explained at this link. I'll show a simple one:

1) Using util.promisify to convert the callback-based method into a promise-returning one:
const fs = require('fs');
const util = require('util');

const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');

(async () => {
  try {
    const files = ['file1.txt', 'file2.txt', 'file3.txt'];
    for (const file of files) {
      console.log(await readFile(file));
    }
  } catch (error) {
    console.error(error);
  }
})();
2) *Sync methods. Since your code is not dealing with concurrency, you can use the *Sync methods:
const fs = require('fs');

try {
  const files = ['file1.txt', 'file2.txt', 'file3.txt'];
  for (const file of files) {
    console.log(fs.readFileSync(file, 'utf8'));
  }
} catch (error) {
  console.error(error);
}
By the way, here is your fixed code:
var fs = require('fs');

function readFile(fileName) {
  return new Promise((resolve, reject) => {
    fs.readFile(fileName, 'utf8', function (error, data) {
      if (error) return reject(error);
      console.log(fileName);
      console.log(data);
      resolve();
    });
  });
}

async function run() {
  await readFile('file1.txt');
  await readFile('file2.txt');
  await readFile('file3.txt');
}

run();
Since you were calling fs.readFile and resolve in the same synchronous sequence, the promise resolved immediately, before the file had been read; that is the cause of the race condition.
You have to wait for the callback to fire and then resolve inside it (within the callback's scope).
There are a couple of options using native Node functionality.

A) With the fs.promises API

You can use destructuring assignment on import to alias fs.promises as just fs:
const { promises: fs } = require("fs");

(async () => {
  try {
    let file1 = await fs.readFile("file1.txt", "utf-8");
    let file2 = await fs.readFile("file2.txt", "utf-8");
  } catch (e) {
    console.log("e", e);
  }
})();
B) With the util.promisify API
const fsSync = require("fs");
const { promisify } = require("util");

const fs = {
  readdir: promisify(fsSync.readdir),
  readFile: promisify(fsSync.readFile),
  // etc.
};

(async () => {
  try {
    let file1 = await fs.readFile("file1.txt", "utf-8");
    let file2 = await fs.readFile("file2.txt", "utf-8");
  } catch (e) {
    console.log("e", e);
  }
})();
Further Reading
How to read file with async/await properly?
Using filesystem in node.js with async / await
I have the following code example, and I have no idea how to solve this using util.promisify() ONLY! Not new Promise()!
const child_process = require('child_process');

const spawn = child_process.spawn('docker', ['--version']);

spawn.stdout.on('data', (data) => {
  process.stdout.write(data);
});

spawn.on('error', (error) => {
  process.stderr.write(error.toString());
  process.exit(1);
});
The second code example works fine.
const child_process = require('child_process');
const { promisify } = require('util');

const promisifiedExecFile = promisify(child_process.execFile);

async function test() {
  const version = await promisifiedExecFile('docker', ['--version']);
  console.log(version);
}

test();
I couldn't quickly find out why the promisify function does not work properly with the spawn function, but you can create your own promisify-like wrapper, like this:
TS
import { spawn as spwn } from 'child_process';

const spawn = (
  cmd: string,
  args: ReadonlyArray<string>,
) => new Promise((resolve, reject) => {
  const cp = spwn(cmd, args);
  const error: string[] = [];
  const stdout: string[] = [];
  cp.stdout.on('data', (data) => {
    stdout.push(data.toString());
  });
  cp.on('error', (e) => {
    error.push(e.toString());
  });
  cp.on('close', () => {
    if (error.length) reject(error.join(''));
    else resolve(stdout.join(''));
  });
});

(async () => {
  try {
    const stdOut = await spawn('docker', ['--version']);
    console.log('stdOut: ', stdOut);
  } catch (error) {
    console.log('error:', error);
    process.exit(1);
  }
})();
JS
const { spawn: spwn } = require('child_process');

const spawn = (
  cmd,
  args,
) => new Promise((resolve, reject) => {
  const cp = spwn(cmd, args);
  const error = [];
  const stdout = [];
  cp.stdout.on('data', (data) => {
    stdout.push(data.toString());
  });
  cp.on('error', (e) => {
    error.push(e.toString());
  });
  cp.on('close', () => {
    if (error.length) reject(error.join(''));
    else resolve(stdout.join(''));
  });
});

(async () => {
  try {
    const stdOut = await spawn('docker', ['--version']);
    console.log('stdOut: ', stdOut);
  } catch (error) {
    console.log('error: ', error);
    process.exit(1);
  }
})();
Node.js' built-in util package has a promisify() function that converts callback-based functions to promise-based functions. This lets you use promise chaining and async/await with callback-based APIs.
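For illustration, here is a minimal sketch of promisify applied to a callback-based API (the file name is just a placeholder):

const util = require('util');
const fs = require('fs');

// fs.readFile takes an (err, data) callback as its last argument,
// which is exactly the shape promisify expects.
const readFile = util.promisify(fs.readFile);

(async () => {
  const contents = await readFile('example.txt', 'utf8');
  console.log(contents);
})();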
I think we can't use promisify() with the spawn() function.

For example, we can use promisify() with execFile() instead of spawn():
// A method from a class; `this.printError` is defined elsewhere in that class.
async asyncExecFile(tool) {
  const execFile = util.promisify(child_process.execFile);
  return await execFile(tool, ['--version'])
    .catch(() => {
      this.printError(`The "${tool}" doesn't exist in the current environment. \n`);
      process.exit(0);
    });
}
It is not possible because there is nothing to promisify: promisify works on functions that take a callback as their last argument, and spawn does not take a callback.

You use spawn by taking the returned ChildProcess and adding listeners to its readable streams (stdout, stderr, stdio, ...).
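For illustration, a minimal sketch of that listener-based usage, reusing the question's docker --version command:

const { spawn } = require('child_process');

const cp = spawn('docker', ['--version']);

// Listen on the child process's readable streams.
cp.stdout.on('data', (chunk) => process.stdout.write(chunk));
cp.stderr.on('data', (chunk) => process.stderr.write(chunk));

// 'error' fires if the process could not be spawned at all.
cp.on('error', (err) => console.error(err));

// 'close' fires once the process has exited and the streams have ended.
cp.on('close', (code) => console.log(`exited with code ${code}`));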
Omar Omeiri's answer is similar to how execFile works inside Node itself, so you can just use the promisified execFile instead. (If you need an unlimited buffer, pass maxBuffer: Infinity in the options.)
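For example, a sketch of that approach: the promisified execFile resolves with a { stdout, stderr } object, and maxBuffer: Infinity lifts the default output limit:

const { execFile } = require('child_process');
const { promisify } = require('util');

const execFileAsync = promisify(execFile);

(async () => {
  // Resolves with { stdout, stderr }; rejects on spawn failure or a non-zero exit code.
  const { stdout } = await execFileAsync('docker', ['--version'], { maxBuffer: Infinity });
  console.log(stdout);
})();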
I'm new to Node.js. I've fetched a JSON file using require and fs, and I'm using MVC: I have a model.js with the function that reads the JSON file, but when the controller invokes the model function, the data is not shown (console.log(data) in the controller shows nothing, although the data can be console.logged inside model.js). Here is my code:
controller.js
exports.renderHomePage = (req, res) => {
  apiServerModel.loadTeams()
    .then(function (data) {
      console.log(data);
      console.log("This is inside controller");
      res.render("index", { // output as string
        teamsList: `${JSON.stringify(data, null, 2)}` // A property called teamsList to be displayed on the browser
        //teamsList: teamView.TeamsView(`${JSON.stringify(data, null, 2)}`)
      });
    })
    .catch(error => console.log(error));
};
model.js
'use strict';

const fs = require('fs');

class TeamsModel {
  static async loadTeams() {
    try {
      await fs.readFile('./json/prov-nodes.json', (err, rawData) => {
        if (err) throw err;
        let teams = JSON.parse(rawData);
        return teams;
        //console.log(teams);
      });
    } catch (error) {
      console.log(error);
    }
    console.log('This is after the read call');
  }
}

exports.TeamsModel = TeamsModel;
First of all, you should read about callbacks vs. promises vs. async/await in Node.js.

When you use async/await you don't have a callback, so use fs/promises instead of fs, since you used await on fs.readFile in your loadTeams function:
'use strict';

const fs = require('fs/promises');

class TeamsModel {
  static async loadTeams() {
    console.log("hi");
    try {
      let rawData = await fs.readFile('./json/prov-nodes.json');
      let teams = JSON.parse(rawData);
      return teams;
    } catch (error) {
      console.log(error);
    }
    console.log('This is after the read call');
  }
}

module.exports = TeamsModel;
I want to know how to use setImmediate with async/await and handle errors properly. I have written the following code, but I am not sure it adheres to best practices.

There is a route in my Express app:
router.get('/parseinvoice', async (req, res, next) => {
  try {
    const parsedInvoiceResponse = await userhelper.getParseInVoiceList();
    res.json({ parsedInvoiceResponse });
  } catch (error) {
    res.json({});
  }
});
The userhelper module code:
var userhelper = {};

const fs = require('fs'),
  path = require('path'),
  filePath = path.join(__dirname, './input_user_story_12.txt');

const { promisify } = require('util');
const readFile = promisify(fs.readFile);

userhelper.getParseInVoiceList = async function() {
  return new Promise(async (resolve, reject) => {
    try {
      setImmediate(async function() {
        try {
          const contents = await readFile(filePath, 'UTF-8');
          resolve(contents);
        } catch (error) {
          reject(error);
        }
      });
    } catch (error) {
      reject(error);
    }
  });
};

module.exports = userhelper;
Although I am getting the response, I am not sure about the setImmediate part, or whether the multiple try/catch blocks are required. Is there a neater way to write the code below?
1.

try {
  setImmediate(async () => {
    var res = await readFile(filePath, 'UTF-8');
  });
} catch (err) {
}

2.

await setImmediate(() => {
  var res = await readFile(filePath, 'UTF-8');
}).catch(){}

3.

try {
  await setImmediate(() => {
    await readFile(filePath, 'UTF-8');
  }).catch(){}
} catch() {
}
4. This should return the result into res:

const res = await setImmediate(() => {
  return readFile(filePath, 'UTF-8');
});
Why are you not just using this?
userhelper.getParseInVoiceList = async function() {
  return await readFile(filePath, 'UTF-8');
};
Expanding on Dan D.'s answer, you can await the resolution of an asynchronous setImmediate before calling the asynchronous promisified readFile, but without more context I am not sure why you would need to do this.
userhelper.getParseInVoiceList = async function() {
  await new Promise((resolve) => setImmediate(() => resolve()));
  return await readFile(filePath, 'UTF-8');
};
I have created this "simple pattern" that combines a Promise and an EventEmitter (in Node.js). But I'm wondering: is there a better way to achieve this?
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
  const emitter = new EventEmitter();
  const promise = new Promise((resolve, reject) => {
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, files) => {
      if (err) {
        reject(err);
        return;
      }
      files.forEach(file => emitter.emit('update-event', file));
      resolve(`I'm done: ${parameters.param} world`);
    });
  });
  return { promise, emitter };
}

const work = doSomething({ param: 'hello', directory: './' });
work.emitter.on('update-event', data => console.log(`Update ${data}`));
work.promise.then(console.log).catch(console.error);
I was thinking of something like:

doSomething(...).on(...).then(...)

but I can't figure out how to do that.
Node.js has a built-in function for this: the require('events').once function! Here is the PR.

It was released with Node v11.13.0 (https://nodejs.org/en/blog/release/v11.13.0/).
An example usage (from docs):
const { once, EventEmitter } = require('events');

async function run() {
  const ee = new EventEmitter();

  process.nextTick(() => {
    ee.emit('myevent', 42);
  });

  const [value] = await once(ee, 'myevent');
  console.log(value); // 42

  const err = new Error('kaboom');
  process.nextTick(() => {
    ee.emit('error', err);
  });

  try {
    await once(ee, 'myevent');
  } catch (err) {
    console.log('error happened', err);
  }
}

run();
Personally, I'm not sure how the accepted answer relates to the OP's question. Anyway, I think I have found a rather simple (but maybe not very nice) way of accomplishing the specific doSomething(...).on(...).then(...) chaining the OP asked for. Taking the OP's example code, we can do something like the following:
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
  var resolves;
  var rejects;
  const emitter = new EventEmitter();
  const promise = new Promise((resolve, reject) => {
    resolves = resolve;
    rejects = reject;
  });
  promise.on = emitter.on;
  promise.emit = emitter.emit;

  // DO DIRTY JOB
  fs.readdir(parameters.directory, (err, files) => {
    if (err) {
      rejects(err);
      return;
    }
    files.forEach(file => promise.emit('update-event', file));
    resolves(`I'm done: ${parameters.param} world`);
  });

  return promise;
}

const work = doSomething({ param: 'hello', directory: './' });
work.on('update-event', data => console.log(`Update ${data}`))
  .then(console.log)
  .catch(console.error);
It works for my limited cases so far, and both the event and the promise can be chained without issues as far as I know. There might be problems for more complicated use-cases that I have not encountered yet, but it does serve the purpose of chaining doSomething(...).on(...).then(...) like the OP asked.
No, you shouldn't combine an event emitter and a promise in one object. Returning them separately, just like you did, is fine.
Of course, in your particular example, there's not really a reason to use an event emitter at all. It just fires when the promise fulfills anyway. Much simpler:
const fs = require('fs');

function doSomething(parameters) {
  return new Promise((resolve, reject) => {
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, files) => {
      if (err) reject(err);
      else resolve(Object.assign(files, parameters));
    });
  });
}

doSomething({ param: 'hello', directory: './' }).then(files => {
  for (const data of files) {
    console.log(`Update ${data}`);
  }
  return `I'm done: ${files.param} world`;
}).then(console.log, console.error);
I would suggest this:
import fs from "fs";
import EventPromised from "event-promised";

function doSomething(parameters) {
  return new EventPromised((resolve, reject, emit) => {
    fs.readdir(parameters.directory, (err, files) => {
      if (err) {
        reject(err);
        return;
      }
      files.forEach(file => emit('update-event', file));
      resolve(`I'm done: ${parameters.param} world`);
    });
  });
}

doSomething({ param: 'hello', directory: './' })
  .on('update-event', data => console.log(`Update ${data}`))
  .then(console.log)
  .catch(console.error);
I need some help with my code. I'm new to Node.js and having a lot of trouble with it.

What I'm trying to do:

Fetch a .txt file with Amazon products (ASINs);
Fetch all products using the amazon-product-api package;
Save each product in a .json file.

My code is not working. I think I messed up the asynchronous/synchronous stuff - help me!
var amazon = require('amazon-product-api');
var fs = require('fs');

var client = amazon.createClient({
  awsId: "XXX",
  awsSecret: "XXX",
  awsTag: "888"
});

var array = fs.readFileSync('./test.txt').toString().split('\n');

for (var i = 1; i < array.length; i++) {
  var ASIN = array[i];
  client.itemLookup({
    domain: 'webservices.amazon.de',
    responseGroup: 'Large',
    idType: 'ASIN',
    itemId: ASIN
  })
  .then(function(results) {
    fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
      if (err) {
        console.log(err);
      } else {
        console.log("JSON saved");
      }
    });
    return results;
  }).catch(function(err) {
    console.log(err);
  });
}
As of 2019...
...the correct answer is to use async/await with the native fs promises module included in node. Upgrade to Node.js 10 or 11 (already supported by major cloud providers) and do this:
const fs = require('fs').promises;
// This must run inside a function marked `async`:
const file = await fs.readFile('filename.txt', 'utf8');
await fs.writeFile('filename.txt', 'test');
Do not use third-party packages and do not write your own wrappers; that's not necessary anymore.
No longer experimental
Before Node 11.14.0, you would still get a warning that this feature is experimental, but it works just fine and it's the way to go in the future. Since 11.14.0, the feature is no longer experimental and is production-ready.
What if I prefer import instead of require?
It works, too - but only in Node.js versions where this feature is not marked as experimental.
import { promises as fs } from 'fs';

(async () => {
  await fs.writeFile('./test.txt', 'test', 'utf8');
})();
Because fs.writeFile is a traditional callback-based asynchronous function, you need to follow the promise spec and return a new promise wrapping it, with resolve and reject handlers, like so:
return new Promise(function(resolve, reject) {
  fs.writeFile("<filename.type>", data, '<file-encoding>', function(err) {
    if (err) reject(err);
    else resolve(data);
  });
});
So in your code, you would use it like this inside your .then() callback:
.then(function(results) {
  return new Promise(function(resolve, reject) {
    fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
      if (err) reject(err);
      else resolve(results);
    });
  });
}).then(function(results) {
  console.log("results here: " + results);
}).catch(function(err) {
  console.log("error here: " + err);
});
Say:

const fs = require('fs');
const util = require('util');
const fs_writeFile = util.promisify(fs.writeFile);
https://nodejs.org/api/util.html#util_util_promisify_original
This is less prone to bugs than the top-voted answer.
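For instance, a minimal sketch of using the promisified function defined above with async/await (file name and content are placeholders):

(async () => {
  try {
    await fs_writeFile('message.txt', 'Hello Node.js');
    console.log('saved');
  } catch (err) {
    console.log(err);
  }
})();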
Finally, the latest Node.js release, v10.3.0, natively supports fs promises.
const fsPromises = require('fs').promises; // or require('fs/promises') in v10.0.0

fsPromises.writeFile(ASIN + '.json', JSON.stringify(results))
  .then(() => {
    console.log('JSON saved');
  })
  .catch(er => {
    console.log(er);
  });
You can check the official documentation for more details.
https://nodejs.org/api/fs.html#fs_fs_promises_api
If you want to import the promise-based version of fs as an ES module, you can do:
import { promises as fs } from 'fs'
await fs.writeFile(...)
As soon as Node v14 is released (see this PR), you can also use:
import { writeFile } from 'fs/promises'
What worked for me was fs.promises.
Example One:
const fs = require("fs")
fs.promises
.writeFile(__dirname + '/test.json', "data", { encoding: 'utf8' })
.then(() => {
// Do whatever you want to do.
console.log('Done');
});
Example Two, using async/await:
const fs = require("fs")
async function writeToFile() {
await fs.promises.writeFile(__dirname + '/test-22.json', "data", {
encoding: 'utf8'
});
console.log("done")
}
writeToFile()
Update Sept 2017: fs-promise has been deprecated in favour of fs-extra.
I haven't used it, but you could look into fs-promise. It's a Node module that:

Proxies all async fs methods exposing them as Promises/A+ compatible promises (when, Q, etc). Passes all sync methods through as values.
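A minimal sketch with fs-extra, the suggested replacement (this assumes it is installed with npm install fs-extra): its asynchronous methods return promises when no callback is passed.

const fse = require('fs-extra');

// Without a callback, writeFile and readFile return promises.
fse.writeFile('test.json', JSON.stringify({ ok: true }))
  .then(() => fse.readFile('test.json', 'utf8'))
  .then(contents => console.log(contents))
  .catch(err => console.error(err));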
Use require('fs/promises')
var fs = require('fs/promises'); // Since 11.14.0
var path = require('path'); // to help us join paths
var content = JSON.stringify(["this is your content"]); // Must be a string to be written.

fs
  .writeFile(path.join(__dirname, 'test.json'), content, { encoding: 'utf8' })
  .then(() => {
    console.log('Write is done!');
  });
Example using async/await:

var fs = require('fs/promises'); // Since 11.14.0
var path = require('path'); // to help us join paths
var content = JSON.stringify(["this is your content"]); // Must be a string to be written.

(async function autorun() {
  await fs.writeFile(path.join(__dirname, 'test.json'), content, { encoding: 'utf8' });
  console.log('Write is done!');
})(); // This is an IIFE: Immediately Invoked Function Expression
const util = require('util');
const fs = require('fs');
const fs_writeFile = util.promisify(fs.writeFile);

fs_writeFile('message.txt', 'Hello Node.js')
  .catch((error) => {
    console.log(error);
  });
To make asynchronous code easier to work with, convert all callbacks to promises using a library like Bluebird.
.then(function(results) {
  fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
    if (err) {
      console.log(err);
    } else {
      console.log("JSON saved");
      return results;
    }
  });
}).catch(function(err) {
  console.log(err);
});
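As a minimal sketch of that promisification idea (assuming Bluebird is installed), Bluebird's promisifyAll adds a promise-returning *Async variant of every fs method:

var Promise = require('bluebird');
var fs = Promise.promisifyAll(require('fs'));

// writeFileAsync is the promisified variant of fs.writeFile.
fs.writeFileAsync('out.json', JSON.stringify({ ok: true }))
  .then(function () {
    console.log('JSON saved');
  })
  .catch(function (err) {
    console.log(err);
  });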
Try this solution with promises (Bluebird):
var amazon = require('amazon-product-api');
var fs = require('fs');
var Promise = require('bluebird');

var client = amazon.createClient({
  awsId: "XXX",
  awsSecret: "XXX",
  awsTag: "888"
});

var array = fs.readFileSync('./test.txt').toString().split('\n');

Promise.map(array, function (ASIN) {
  // Return the promise so Promise.map waits for each lookup.
  return client.itemLookup({
    domain: 'webservices.amazon.de',
    responseGroup: 'Large',
    idType: 'ASIN',
    itemId: ASIN
  }).then(function(results) {
    fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
      if (err) {
        console.log(err);
      } else {
        console.log("JSON saved");
      }
    });
    return results;
  }).catch(function(err) {
    console.log(err);
  });
});
Use fs.writeFileSync inside a try/catch block, as below.
var fs = require('fs');

try {
  fs.writeFileSync(ASIN + '.json', JSON.stringify(results));
  console.log("JSON saved");
  return results;
} catch (error) {
  console.log(error);
}