fs.writeFile in a promise, asynchronous-synchronous stuff - node.js

I need some help with my code. I'm new to Node.js and have a lot of trouble with it.
What I'm trying to do:
Fetch a .txt file with Amazon products (ASINs);
Fetch all products using the amazon-product-api package;
Save each product in a .json file.
My code is not working. I think I messed up this asynchronous-synchronous stuff - help me!
var amazon = require('amazon-product-api');
var fs = require('fs');

var client = amazon.createClient({
    awsId: "XXX",
    awsSecret: "XXX",
    awsTag: "888"
});

var array = fs.readFileSync('./test.txt').toString().split('\n');

for (var i = 1; i < array.length; i++) {
    var ASIN = array[i];

    client.itemLookup({
        domain: 'webservices.amazon.de',
        responseGroup: 'Large',
        idType: 'ASIN',
        itemId: ASIN
    })
    .then(function(results) {
        fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("JSON saved");
            }
        });
        return results;
    }).catch(function(err) {
        console.log(err);
    });
}

As of 2019...
...the correct answer is to use async/await with the native fs promises module included in node. Upgrade to Node.js 10 or 11 (already supported by major cloud providers) and do this:
const fs = require('fs').promises;
// This must run inside a function marked `async`:
const file = await fs.readFile('filename.txt', 'utf8');
await fs.writeFile('filename.txt', 'test');
Do not use third-party packages and do not write your own wrappers; that's not necessary anymore.
No longer experimental
Before Node 11.14.0, you would still get a warning that this feature is experimental, but it works just fine and it's the way to go in the future. Since 11.14.0, the feature is no longer experimental and is production-ready.
What if I prefer import instead of require?
It works, too - but only in Node.js versions where this feature is not marked as experimental.
import { promises as fs } from 'fs';
(async () => {
    await fs.writeFile('./test.txt', 'test', 'utf8');
})();
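Applied to the original question, a minimal sketch (assuming client.itemLookup returns a promise, as in the question's code) could look like:
const fs = require('fs').promises;
const amazon = require('amazon-product-api');

const client = amazon.createClient({
    awsId: "XXX",
    awsSecret: "XXX",
    awsTag: "888"
});

async function run() {
    // Read the ASIN list; one ASIN per line
    const array = (await fs.readFile('./test.txt', 'utf8')).split('\n');
    for (const ASIN of array) {
        try {
            const results = await client.itemLookup({
                domain: 'webservices.amazon.de',
                responseGroup: 'Large',
                idType: 'ASIN',
                itemId: ASIN
            });
            // Await the write so errors land in the catch below
            await fs.writeFile(ASIN + '.json', JSON.stringify(results));
            console.log('JSON saved');
        } catch (err) {
            console.log(err);
        }
    }
}

run();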

Because fs.writeFile is a traditional callback-based asynchronous function, you need to follow the promise spec and return a new promise wrapping it, with resolve and reject handlers, like so:
return new Promise(function(resolve, reject) {
    fs.writeFile("<filename.type>", data, '<file-encoding>', function(err) {
        if (err) reject(err);
        else resolve(data);
    });
});
So in your code, you would use it inside your .then() handler like so:
.then(function(results) {
    return new Promise(function(resolve, reject) {
        fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
            if (err) reject(err);
            else resolve(results); // resolve with results; `data` is not defined in this scope
        });
    });
}).then(function(results) {
    console.log("results here: " + results);
}).catch(function(err) {
    console.log("error here: " + err);
});

Say:
const util = require('util')
const fs = require('fs')
const fs_writeFile = util.promisify(fs.writeFile)
https://nodejs.org/api/util.html#util_util_promisify_original
This is less prone to bugs than the top-voted answer.

Finally, as of Node.js v10.3.0, fs promises are natively supported.
const fsPromises = require('fs').promises; // or require('fs/promises') in v10.0.0

fsPromises.writeFile(ASIN + '.json', JSON.stringify(results))
    .then(() => {
        console.log('JSON saved');
    })
    .catch(er => {
        console.log(er);
    });
You can check the official documentation for more details.
https://nodejs.org/api/fs.html#fs_fs_promises_api

If you want to import the promise based version of fs as an ES module you can do:
import { promises as fs } from 'fs'
await fs.writeFile(...)
As soon as Node v14 is released (see this PR), you can also use
import { writeFile } from 'fs/promises'

What worked for me was fs.promises.
Example One:
const fs = require("fs")
fs.promises
    .writeFile(__dirname + '/test.json', "data", { encoding: 'utf8' })
    .then(() => {
        // Do whatever you want to do.
        console.log('Done');
    });
Example Two, using async/await:
const fs = require("fs")
async function writeToFile() {
    await fs.promises.writeFile(__dirname + '/test-22.json', "data", {
        encoding: 'utf8'
    });
    console.log("done")
}

writeToFile()

Update Sept 2017: fs-promise has been deprecated in favour of fs-extra.
I haven't used it, but you could look into fs-promise. It's a node module that:
Proxies all async fs methods exposing them as Promises/A+ compatible
promises (when, Q, etc). Passes all sync methods through as values.
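For completeness, a minimal fs-extra sketch (assuming its promise-returning behavior when no callback is passed):
const fse = require('fs-extra');

fse.writeFile('test.json', JSON.stringify({ ok: true }))
    .then(() => console.log('JSON saved'))
    .catch((err) => console.log(err));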

Use require('fs/promises')
var fs = require('fs/promises'); // Since 11.14.0
var path = require('path'); // to help us join the paths
var content = JSON.stringify(["this is your content"]); // must be a string to be written

fs.writeFile(path.join(__dirname, 'test.json'), content, { encoding: 'utf8' })
    .then(() => {
        console.log('Write is done!');
    });
Example using async/await
var fs = require('fs/promises'); // Since 11.14.0
var path = require('path'); // to help us join the paths
var content = JSON.stringify(["this is your content"]); // must be a string to be written

(async function autorun() {
    await fs.writeFile(path.join(__dirname, 'test.json'), content, { encoding: 'utf8' });
    console.log('Write is done!');
})(); // This is called an IIFE: immediately invoked function expression

const util = require('util')
const fs = require('fs');
const fs_writeFile = util.promisify(fs.writeFile)

fs_writeFile('message.txt', 'Hello Node.js')
    .catch((error) => {
        console.log(error)
    });

To make asynchronous code easier to work with, convert the callbacks to promises using a library like bluebird.
.then(function(results) {
    fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
        if (err) {
            console.log(err);
        } else {
            console.log("JSON saved");
            return results;
        }
    });
}).catch(function(err) {
    console.log(err);
});
Try this solution with promises (bluebird):
var amazon = require('amazon-product-api');
var fs = require('fs');
var Promise = require('bluebird');

var client = amazon.createClient({
    awsId: "XXX",
    awsSecret: "XXX",
    awsTag: "888"
});

var array = fs.readFileSync('./test.txt').toString().split('\n');

Promise.map(array, function (ASIN) {
    // Return the chain so Promise.map waits for each lookup
    return client.itemLookup({
        domain: 'webservices.amazon.de',
        responseGroup: 'Large',
        idType: 'ASIN',
        itemId: ASIN
    }).then(function(results) {
        fs.writeFile(ASIN + '.json', JSON.stringify(results), function(err) {
            if (err) {
                console.log(err);
            } else {
                console.log("JSON saved");
            }
        });
        return results;
    }).catch(function(err) {
        console.log(err);
    });
});
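To make the writes themselves part of the chain, you could also promisify fs.writeFile with bluebird (a sketch, assuming the same array and client as above):
var writeFileAsync = Promise.promisify(fs.writeFile);

Promise.map(array, function (ASIN) {
    return client.itemLookup({
        domain: 'webservices.amazon.de',
        responseGroup: 'Large',
        idType: 'ASIN',
        itemId: ASIN
    }).then(function (results) {
        // Now the write is awaited before this map entry settles
        return writeFileAsync(ASIN + '.json', JSON.stringify(results));
    }).then(function () {
        console.log("JSON saved");
    });
}).catch(function (err) {
    console.log(err);
});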

Use fs.writeFileSync inside a try/catch block, as below.
var fs = require('fs');

try {
    fs.writeFileSync(ASIN + '.json', JSON.stringify(results));
    console.log("JSON saved");
    return results;
} catch (error) {
    console.log(error); // the catch variable is `error`, not `err`
}
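In the context of the original loop, this fragment would sit inside the .then handler, where ASIN and results are in scope (a sketch):
.then(function (results) {
    try {
        fs.writeFileSync(ASIN + '.json', JSON.stringify(results));
        console.log("JSON saved");
        return results;
    } catch (error) {
        console.log(error);
    }
});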

Related

In Node js, why is my controller.js not showing the data after invoking a model.js function

I'm new to Node.js. I've fetched a JSON file using require and fs. Using MVC, I have a model.js with the function that reads the JSON file, but when the controller invokes the model function, the data is not shown (console.log(data) in the controller prints nothing, although it can be console.logged in model.js). Here is my code:
controller.js
exports.renderHomePage = (req, res) => {
    apiServerModel.loadTeams()
        .then(function (data) {
            console.log(data);
            console.log("This is inside controller");
            res.render("index", { // output as string
                teamsList: `${JSON.stringify(data, null, 2)}` // a property called teamsList to be displayed in the browser
                //teamsList: teamView.TeamsView(`${JSON.stringify(data, null, 2)}`)
            });
        })
        .catch(error => console.log(error));
};
model.js
'use strict';
const fs = require('fs');

class TeamsModel {
    static async loadTeams() {
        try {
            await fs.readFile('./json/prov-nodes.json', (err, rawData) => {
                if (err) throw err;
                let teams = JSON.parse(rawData);
                return teams;
                //console.log(teams);
            });
        } catch (error) {
            console.log(error)
        }
        console.log('This is after the read call');
    }
}

exports.TeamsModel = TeamsModel;
First of all, you should read about callbacks vs. promises vs. async/await in Node.js.
When you use async/await, you don't have a callback, so:
use fs/promises instead of fs, because you used await on fs.readFile in your loadTeams function.
'use strict';
const fs = require('fs/promises');

class TeamsModel {
    static async loadTeams() {
        console.log("hi")
        try {
            let rawData = await fs.readFile('./json/prov-nodes.json')
            let teams = JSON.parse(rawData);
            return teams;
        } catch (error) {
            console.log(error)
        }
        console.log('This is after the read call');
    }
}

module.exports = TeamsModel;
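A usage sketch matching the controller above (the require path is hypothetical; loadTeams is static, so it is called on the exported class):
const apiServerModel = require('./model.js');

apiServerModel.loadTeams()
    .then((teams) => console.log(teams))
    .catch((err) => console.log(err));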

Node.js read file using async/await

I am experimenting with async/await code to read a file.
Here's my code:
var fs = require('fs');

function readFile(fileName) {
    return new Promise(resolve => {
        //console.log(test);
        fs.readFile(fileName, 'utf8', function (err, data) {
            if (err) throw err;
            console.log(fileName)
            console.log(data)
        })
        resolve();
    });
}

async function run() {
    await readFile('file1.txt');
    await readFile('file2.txt');
    readFile('file3.txt');
}

run();
But the result is still random: file3 is sometimes read before file2. Where am I going wrong?
There are many ways to achieve that.
Most of them are explained in this link.
I'll write a simple one:
1) Using util.promisify to convert the callback method to a promise:
const fs = require('fs');
const util = require('util');

const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');

(async () => {
    try {
        const files = ['file1.txt', 'file2.txt', 'file3.txt'];
        for (const file of files) {
            console.log(
                await readFile(file)
            );
        }
    }
    catch (error) {
        console.error(error);
    }
})();
2) *Sync methods. Since your code is not dealing with concurrency, you can use the *Sync methods:
const fs = require('fs');

try {
    const files = ['file1.txt', 'file2.txt', 'file3.txt'];
    for (const file of files) {
        console.log(
            fs.readFileSync(file, 'utf8')
        );
    }
}
catch (error) {
    console.error(error);
}
By the way, here is your fixed code:
var fs = require('fs');

function readFile(fileName) {
    return new Promise((resolve, reject) => {
        fs.readFile(fileName, 'utf8', function (error, data) {
            if (error) return reject(error);
            console.log(fileName)
            console.log(data)
            resolve();
        })
    });
}

async function run() {
    await readFile('file1.txt');
    await readFile('file2.txt');
    await readFile('file3.txt');
}

run();
Since you were calling resolve right after starting readFile, rather than inside its callback, the promise resolved immediately; that is the reason for the race condition.
You have to wait for the callback and resolve inside it (in the callback scope).
There are a couple of options with native Node functionality:
A) With the fs.promises API
You can use destructuring assignment on import to alias fs.promises as just fs
const { promises: fs } = require("fs");

(async () => {
    try {
        let file1 = await fs.readFile("file1.txt", "utf-8");
        let file2 = await fs.readFile("file2.txt", "utf-8");
    } catch (e) {
        console.log("e", e);
    }
})()
B) With the util.promisify API
const fsSync = require("fs");
const { promisify } = require("util");

const fs = {
    readdir: promisify(fsSync.readdir),
    readFile: promisify(fsSync.readFile),
    // etc.
};

(async () => {
    try {
        let file1 = await fs.readFile("file1.txt", "utf-8");
        let file2 = await fs.readFile("file2.txt", "utf-8");
    } catch (e) {
        console.log("e", e);
    }
})()
Further Reading
How to read file with async/await properly?
Using filesystem in node.js with async / await

Combine Promise and EventEmitter

I have created this "simple pattern" that combines a Promise and an EventEmitter (with Node.js).
But I'm wondering: is there a better way to achieve this?
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
    const emitter = new EventEmitter();
    const promise = new Promise((resolve, reject) => {
        // DO DIRTY JOB
        fs.readdir(parameters.directory, (err, files) => {
            if (err) {
                reject(err);
                return;
            }
            files.forEach(file => emitter.emit('update-event', file));
            resolve(`I'm done: ${parameters.param} world`);
        });
    });
    return { promise, emitter };
}

const work = doSomething({ param: 'hello', directory: './' });
work.emitter.on('update-event', data => console.log(`Update ${data}`));
work.promise.then(console.log).catch(console.error);
I was thinking like:
doSomething(...).on(...).then(...)
but I can't figure out how to do that.
Node.js has a built-in function for this: require('events').once. Here's the PR.
It was released with Node v11.13.0 (https://nodejs.org/en/blog/release/v11.13.0/).
An example usage (from docs):
const { once, EventEmitter } = require('events');

async function run() {
    const ee = new EventEmitter();

    process.nextTick(() => {
        ee.emit('myevent', 42);
    });

    const [value] = await once(ee, 'myevent');
    console.log(value); // 42

    const err = new Error('kaboom');
    process.nextTick(() => {
        ee.emit('error', err);
    });

    try {
        await once(ee, 'myevent');
    } catch (err) {
        console.log('error happened', err);
    }
}

run();
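Note that once resolves on a single event. For a stream of events like the 'update-event's above, newer Node versions (13.6.0+) also ship an async-iterator counterpart, events.on; a minimal sketch, assuming that API:
const { on, EventEmitter } = require('events');

async function run() {
    const ee = new EventEmitter();
    process.nextTick(() => {
        ee.emit('update-event', 'a.txt');
        ee.emit('update-event', 'b.txt');
    });
    for await (const [file] of on(ee, 'update-event')) {
        console.log(`Update ${file}`);
        if (file === 'b.txt') break; // the iterator never ends on its own
    }
}

run();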
Personally, I'm not sure how the accepted answer is related to the OP's question. Anyway, I think I have found a rather simple (but maybe not very nice) way of accomplishing the specific doSomething(...).on(...).then(...) chaining asked by the OP. Taking the OP's example code, we can do something like the following:
const { EventEmitter } = require('events');
const fs = require('fs');

function doSomething(parameters) {
    var resolves;
    var rejects;
    const emitter = new EventEmitter();
    const promise = new Promise((resolve, reject) => {
        resolves = resolve;
        rejects = reject;
    });
    promise.on = emitter.on;
    promise.emit = emitter.emit;
    // DO DIRTY JOB
    fs.readdir(parameters.directory, (err, files) => {
        if (err) {
            rejects(err);
            return;
        }
        files.forEach(file => promise.emit('update-event', file));
        resolves(`I'm done: ${parameters.param} world`);
    });
    return promise;
}

const work = doSomething({ param: 'hello', directory: './' });
work.on('update-event', data => console.log(`Update ${data}`))
    .then(console.log)
    .catch(console.error);
It works for my limited cases so far, and both the event and the promise can be chained without issues as far as I know. There might be problems for more complicated use-cases that I have not encountered yet, but it does serve the purpose of chaining doSomething(...).on(...).then(...) like the OP asked.
No, you shouldn't combine an event emitter and a promise in one object. Returning them separately, just like you did, is fine.
Of course, in your particular example, there's not really a reason to use an event emitter at all. It just fires when the promise fulfills anyway. Much simpler:
const fs = require('fs');

function doSomething(parameters) {
    return new Promise((resolve, reject) => {
        // DO DIRTY JOB
        fs.readdir(parameters.directory, (err, files) => {
            if (err) reject(err);
            else resolve(Object.assign(files, parameters));
        });
    });
}

doSomething({ param: 'hello', directory: './' }).then(files => {
    for (const data of files) {
        console.log(`Update ${data}`)
    }
    return `I'm done: ${files.param} world`;
}).then(console.log, console.error);
I would suggest this:
import fs from "fs"; // fs was used but never imported in the original snippet
import EventPromised from "event-promised";

function doSomething(parameters) {
    return new EventPromised((resolve, reject, emit) => {
        fs.readdir(parameters.directory, (err, files) => {
            if (err) {
                reject(err);
                return;
            }
            files.forEach(file => emit('update-event', file));
            resolve(`I'm done: ${parameters.param} world`);
        });
    });
}

doSomething({ param: 'hello', directory: './' })
    .on('update-event', data => console.log(`Update ${data}`))
    .then(console.log)
    .catch(console.error);

Node.js | callback is not defined

I'm new to Node.js and I'm trying to understand callbacks. I was wondering what's wrong with my code. I expected it to display all files in the directory without their extensions, but all I got was an undefined message.
'use strict';

const fs = require('fs');

const postsDirectory = './app/posts';

function listPosts(callback) {
    let posts = [];
    fs.readdir(postsDirectory, function(err, files) {
        if (err) {
            callback(err);
        } else {
            files.forEach(file => {
                posts.push(file.split('.').slice(0, -1).join('.'));
            });
            callback(posts);
        }
    });
}

console.log(listPosts());
process.exit(0)
Expected Output:
file1
file2
file3
Use the callback in the following way:
'use strict';

const fs = require('fs');

const postsDirectory = './app/posts';

function listPosts(callback) {
    let posts = [];
    fs.readdir(postsDirectory, function(err, files) {
        if (err) {
            return callback(err);
        } else {
            files.forEach(file => {
                posts.push(file.split('.').slice(0, -1).join('.'));
            });
            // check before returning here
            console.log("posts", posts);
            return callback(null, posts);
        }
    });
}

// Use the callback function with (err, result) params
listPosts(function(err, result) {
    if (err) {
        console.log(err);
    } else {
        console.log(result);
    }
});
// Note: don't call process.exit(0) here; it would terminate
// the process before the asynchronous readdir callback runs.
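Alternatively, a promise-based sketch of the same listing (assuming Node's util.promisify):
'use strict';

const fs = require('fs');
const util = require('util');
const readdir = util.promisify(fs.readdir);

const postsDirectory = './app/posts';

async function listPosts() {
    const files = await readdir(postsDirectory);
    // Strip the extension from each filename
    return files.map((file) => file.split('.').slice(0, -1).join('.'));
}

listPosts()
    .then((posts) => console.log(posts))
    .catch((err) => console.log(err));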

Avoiding callback hell

I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
    fs.readdir(markdownFolder, function(err, markdown) {
        if (err) throw err;
        markdown.forEach(function(file) {
            fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
                if (err) throw err;
                fileNoExtension = file.slice(0, file.indexOf('.'));
                if (req.params.markdown_file_noext == fileNoExtension) {
                    res.json({
                        'title': fileNoExtension,
                        'markdown': marked(file_content)
                    });
                }
            });
        });
    });
});
But I end up with a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as the promise library:
const Q = require('q');
const fs = require('fs');

const markdownFolder = './markdown/';

const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);

readdir(markdownFolder).then(markdown => {
    const promises = [];
    markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
    return Q.all(promises);
}).then(files => {
    // Do your magic.
}).catch(error => {
    // Do something with error.
});
You have different options:
1) Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
2) Use promises, but you will need to use bluebird to wrap the fs module.
3) For a more advanced option, you can use generators plus promises to make your code look more synchronous. Take a look at co or bluebird.coroutine (a short co sketch follows this list).
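A minimal co sketch of that third option, assuming co's generator-based flow (the promisified helpers here are built with util.promisify):
const co = require('co');
const fs = require('fs');
const util = require('util');

const readdir = util.promisify(fs.readdir);
const readFile = util.promisify(fs.readFile);

co(function* () {
    const files = yield readdir('./markdown/');
    // Yielding an array of promises runs the reads in parallel
    return yield files.map((file) => readFile('./markdown/' + file, 'utf8'));
}).then((contents) => {
    console.log(`Read ${contents.length} files`);
}).catch((err) => {
    console.log(err);
});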
With promises, you could do it like this:
const path = require('path');

var apiRouter = express.Router();
markdownFolder = './markdown/';

apiRouter.get('/:markdown_file_noext', function(req, res) {
    readdir(markdownFolder)
        .then((files) => {
            const tasks = files.map((file) => {
                const filePath = path.resolve(markdownFolder, file);
                // Keep the filename together with its content,
                // so it is still in scope in the next .then()
                return readFile(filePath).then((content) => ({ file, content }));
            });
            return Promise.all(tasks); // Read all files
        })
        .then((fileContents) => {
            return fileContents
                .map(({ file, content }) => {
                    const fileNoExtension = file.slice(0, file.indexOf('.'));
                    if (req.params.markdown_file_noext == fileNoExtension) {
                        return {
                            'title': fileNoExtension,
                            'markdown': marked(content)
                        };
                    }
                })
                .filter(Boolean); // Drop the files that did not match
        })
        .then((results) => {
            // It's better to aggregate all results in one array and return it,
            // instead of calling res.json for each result
            res.json(results);
        })
        .catch((err) => {
            // All errors are caught here
            console.log(err);
        });
});
function readdir(folderPath) {
    return new Promise((resolve, reject) => {
        fs.readdir(folderPath, (err, files) => { // the arrow `=>` was missing here
            if (err) {
                return reject(err);
            }
            resolve(files);
        });
    });
}

function readFile(filePath) {
    return new Promise((resolve, reject) => {
        fs.readFile(filePath, 'utf8', (err, file_content) => {
            if (err) {
                return reject(err);
            }
            resolve(file_content);
        });
    });
}
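With async/await (Node 8+), the same route flattens further; a sketch reusing the readdir/readFile helpers above:
apiRouter.get('/:markdown_file_noext', async function(req, res) {
    try {
        const files = await readdir(markdownFolder);
        const contents = await Promise.all(
            files.map((file) => readFile(path.resolve(markdownFolder, file)))
        );
        const results = files
            .map((file, i) => ({ file, content: contents[i] }))
            .filter(({ file }) => file.slice(0, file.indexOf('.')) === req.params.markdown_file_noext)
            .map(({ file, content }) => ({
                title: file.slice(0, file.indexOf('.')),
                markdown: marked(content)
            }));
        res.json(results);
    } catch (err) {
        console.log(err);
    }
});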
