How can I run these if statements synchronously? I have tried many times but have been unable to fix this. (I am a Node.js beginner.)
I am trying to use async/await here, but it does not work.
How can I check that the first if condition has completed before the second if statement runs?
Please help.
Here is my dummy code:
// Asker's original (broken) code, kept verbatim for the discussion below.
// NOTE(review): record1/record2 and val are implicit globals — no const/let.
record1='10';
record2='20';
// main() fires both branches without waiting: firstJob returns a promise
// that is never awaited, so the two jobs run interleaved.
function main(){
if(record1){
console.log('-------------------------');
console.log('I am record 1')
// The trailing comma here is the comma operator, not a statement separator.
val='John',
firstJob(val)
}
if(record2){
console.log('I am record 2')
val='Rahul',
firstJob(val)
}
}
// firstJob/secondJob from the question: each awaits the next step, so the
// chain only completes once listFiles()'s promise settles.
async function firstJob(val){
console.log('Hello, I am ' + val)
await secondJob(val);
}
async function secondJob(val){
console.log(val+ ' is a nodeJs beginner!')
// NOTE(review): listFiles (below) never resolves its promise, so this
// await hangs forever.
await listFiles()
}
function thirdJob(arg){
// NOTE(review): `arg='pass'` is an assignment, not a comparison — it is
// always truthy here; the answer below calls this out (use === instead).
if (arg='pass'){
console.log('This is end of the one if condition')
}
}
function listFiles (){
// NOTE(review): neither resolve() nor reject() is ever called, so this
// promise never settles and any `await listFiles()` waits forever.
return new Promise((resolve, reject) => {
setTimeout(() => {
const path = require('path');
const fs = require('fs');
const { exit } = require('process');
const directoryPath = path.join(__dirname, '../');
console.log('List of Avaialble Files :');
fs.readdir(directoryPath, { withFileTypes: true },function (err, files) {
if (err) {
return console.log('Unable to scan directory: ' + err);
}
files.forEach(function (file) {
if (file.isFile()){
console.log(file);
}
});
});
// `arg` is an implicit global, and thirdJob() is called with no argument.
arg='pass'
thirdJob();
}, 2000)
})
}
main();
The short answer is you can't "make them run synchronously".
You have to patiently wait until they're done to get the answer.
So, without making main async, you have to use the promises the old fashioned way, and sequence the actions using then.
// Answer's version: sequence the two conditional jobs on one promise chain.
// Improved: declared with const to avoid the implicit globals from the
// question, and the chain is returned so callers can await completion.
const record1 = '10';
const record2 = '20';
function main(){
// Start from a resolved promise so each branch can be chained with .then().
return Promise.resolve()
.then(() => {
if(record1){
console.log('-------------------------');
console.log('I am record 1');
const val = 'John';
// Returning the promise makes the next .then() wait for firstJob.
return firstJob(val)
}
})
.then(() => {
if(record2){
console.log('I am record 2')
const val = 'Rahul';
return firstJob(val)
}
});
}
// Same helper chain as the question; each step awaits the next.
async function firstJob(val){
console.log('Hello, I am ' + val)
await secondJob(val);
}
async function secondJob(val){
console.log(val+ ' is a nodeJs beginner!')
// Completes only once listFiles() resolves (see the fixed version below).
await listFiles()
}
main();
I've just included the snippet for the if and promise stuff. The gist here is that you conditionally chain together your calls to firstJob.
Each call to then allows you to (potentially, it's not required) attach another promise to the execution of the one that just finished. In the snippet above, we're doing that only if the condition is truthy by returning the promise from the calls to firstJob.
By the way, your implementation of listFiles isn't ever going to finish because you never invoke resolve on the promise you created inside the function. This solves the problem by resolving your promise once the looping is done.
// Answer's listFiles with resolve/reject wired up so the promise settles.
// Fixed: `return` after reject — without it, execution fell through to
// files.forEach with `files === undefined` and threw after rejecting.
// Also dropped the unused `exit` require.
function listFiles (){
return new Promise((resolve, reject) => {
setTimeout(() => {
const path = require('path');
const fs = require('fs');
const directoryPath = path.join(__dirname, '../');
console.log('List of Avaialble Files :');
fs.readdir(directoryPath, { withFileTypes: true },function (err, files) {
if (err) {
console.log('Unable to scan directory: ' + err);
// Stop here: the promise is already rejected.
return reject(err);
}
files.forEach(function (file) {
if (file.isFile()){
console.log(file);
}
});
// Settle the promise once the listing loop is done.
resolve();
});
// Leftover from the question's code; thirdJob() runs with no argument
// (its own `arg='pass'` assignment makes the branch execute).
arg='pass'
thirdJob();
}, 2000)
})
}
Note the added call to resolve once you've completed your loop.
I also added a call to reject in the case that readdir returned an error, since that is the proper way to propagate it with your manual promise.
A few more pointers: generally, modules are required once at the top of the file instead of dynamically inside a function. The penalty for doing it the way you have is not bad, since there is a cache for required modules. It's just not idiomatic.
if (arg='pass'){
Doesn't do any equality check, that's an assignment, you need ==, or === if you want to check for equality.
Related
I am going crazy trying to fix this bug so please help :-)
I am using https://pdfkit.org/
This creates a stream that when finished is piped to fs.createWriteStream
My issue is the first time the code runs this works and the PDF is generated.
The next time the Code runs a file with Zero Bytes is created.
I am calling the function from an API running on express.
The issue appears to be the async nature of fs.createWriteStream.
The stream finishes after the API has returned. I cannot seem to find a way to block while confirming the file has been created.
What is odd is that the code works the first time it runs, but fails when run again:
Here is the Pipe Function;
// NOTE(review): pipeline() here is callback-based and is not awaited, so
// this async function resolves immediately — before the file is written.
async function _writeFile(fileObj) {
let fileStream = fs.createWriteStream(fileObj.fileName)
pipeline(
doc,
fileStream,
async (err) => {
if (err) {
console.error('PDF failed', err);
// This return goes nowhere useful; the comma expression evaluates
// to `err` and the value is discarded by the callback.
return ('Pipeline failed', err)
} else {
console.log('PDF succeeded');
}
}
)
}
This is called from:
// Build the PDF report section by section, then write it to a timestamped
// file under ./controllers/tmp/ and return the file metadata.
exports.drawReport = async (payload) => {
var date = new Date();
const timeStamp = date.toJSON();
let path = './controllers/tmp/'
var fileName = path + timeStamp + '.' + payload.type + '.pdf'
try {
// Start Report
await _startReport(payload)
// Check Starting position on page & add status box header
if (device_card_reference == 260) {
await _deviceTitle(payload);
}
// Add Devices
await _reportDevice(payload);
// Call Footer for final page
await _reportFooter()
console.log("PDF Done - Writing File")
// File Meta Data
let fileObj = {
type: payload.type,
siteId: payload.siteId,
fileName: fileName,
timeStamp: timeStamp
}
// Create file to store PDF
// NOTE(review): _writeFile above resolves before the stream finishes, so
// doc.end() below can run before the PDF is flushed to disk.
await _writeFile(fileObj)
doc.end()
console.log("PDF MADE?")
return (fileObj)
} catch (err) {
console.error('MakePDF ERROR: ' + err.message);
return (err.message)
}
}
pipeline runs asynchronously, so it's not awaited, which is why doc.end() runs before the file is done
try wrapping pipeline in a promise, and then resolve when the stream is done:
// function that returns a promise
function _writeFile(fileObj) {
  // Wrap pipeline's completion callback in a promise so the caller can
  // await the stream finishing before calling doc.end().
  return new Promise((resolve, reject) => {
    const out = fs.createWriteStream(fileObj.fileName);
    const onDone = async (err) => {
      if (!err) {
        console.log('PDF succeeded');
        // Stream flushed — safe for the caller to continue.
        resolve('PDF succeeded');
        return;
      }
      console.error('PDF failed', err);
      // Rejection surfaces in the caller's `.catch`.
      reject({res:'Pipeline failed', err});
    };
    pipeline(doc, out, onDone);
  });
}
add .catch() to handle error:
// Create file to store PDF
await _writeFile(fileObj).catch(err => console.log(err));
or even better, use stream promises API
const {pipeline } = require('stream/promises');
// Promise-based pipeline: awaiting it guarantees the write stream has
// finished (or an error has been thrown) before this function resolves.
async function _writeFile(fileObj) {
const fileStream = fs.createWriteStream(fileObj.fileName);
await pipeline(doc, fileStream);
console.log('PDF succeeded');
}
I have this Node.js code which reads a folder and processes its files. The code works, but it still prints all the file names first and only then reads the files. How do I get one file, read its contents, and then move on to the next, instead of listing all the files first?
async function readingDirectory(directory) {
try {
// NOTE(review): `fileNames` is an implicit global (no const/let).
fileNames = await fs.readdir(directory);
fileNames.map(file => {
const absolutePath = path.resolve(folder, file);
log(absolutePath);
// Callback-style readFile: .map does not wait for these callbacks, so
// all paths print first and the file contents arrive later.
fs.readFile(absolutePath, (err, data) => {
log(data); // How to make it async await here?
});
});
} catch {
console.log('Directory Reading Error');
}
}
readingDirectory(folder);
To use await, you need to use promise versions of fs.readFile() and fs.readdir() which you can get on fs.promises and if you want these to run sequentially, then use a for loop instead of .map():
// Read every file in `directory` one at a time, logging path then contents.
async function readingDirectory(directory) {
  const names = await fs.promises.readdir(directory);
  for (const name of names) {
    const absolutePath = path.join(directory, name);
    log(absolutePath);
    // Awaiting inside the loop keeps the reads strictly sequential.
    const contents = await fs.promises.readFile(absolutePath);
    log(contents);
  }
}
readingDirectory(folder)
  .then(() => log("all done"))
  .catch(err => log(err));
I am trying to create a script to download pages from multiple urls using node js but the loop didn't want to wait for the request to finish and continued printing, I also got a hint to use the async for loop, but still it didn't work.
here's my code
// NOTE(review): GetPage returns undefined (no promise), so the
// `await GetPage(...)` in the loop below does not actually wait.
function GetPage(url){
console.log(` Downloading page ${url}`);
request({
url: `${url}`
},(err,res,body) => {
if(err) throw err;
console.log(` Writing html to file` );
fs.writeFile(`${url.split('/').slice(-1)[0]}`,`${body}`,(err) => {
if(err) throw err;
console.log('saved');
});
});
}
var list = [ 'https://www.someurl.com/page1.html', 'https://www.someurl.com/page2.html', 'https://www.someurl.com/page3.html' ]
const main = async () => {
for(let i = 0; i < list.length; i++){
console.log(` processing ${list[i]}`);
// Awaiting undefined resolves immediately — the loop does not pause.
await GetPage(list[i]);
}
};
main().catch(console.error);
Output :
processing https://www.someurl.com/page1.html
Downloading page https://www.someurl.com/page1.html
processing https://www.someurl.com/page2.html
Downloading page https://www.someurl.com/page2.html
processing https://www.someurl.com/page3.html
Downloading page https://www.someurl.com/page3.html
Writing html to file
Writing html to file
saved
saved
Writing html to file
saved
There are a couple of problems with your code.
You are mixing callback-style code with code that should be using promises. Also, your getPage function is not async (it doesn't return a promise), so you cannot await it.
You just have to return a promise from your getPage() function, and correctly resolve it or reject it.
// Promise wrapper around request(): resolves once the body has been saved.
function getPage(url) {
return new Promise((resolve, reject) => {
console.log(` Downloading page ${url}`);
request({ url: `${url}` }, (err, res, body) => {
// `return` stops the callback after rejecting; without it, execution
// would continue into fs.writeFile with an undefined body.
if (err) return reject(err);
console.log(` Writing html to file`);
fs.writeFile(`${url.replace(/\//g,'-')}.html`, `${body}`, (writeErr) => {
if (writeErr) return reject(writeErr);
console.log("saved");
resolve();
});
});
});
}
You don't have to change your main() function loop will await for the getPage() function.
For loop doesn't wait for callback to be finished, it will continue executing it. You need to turn either getPage function to promise or use Promise.all as shown below.
var list = [
"https://www.someurl.com/page1.html",
"https://www.someurl.com/page2.html",
"https://www.someurl.com/page3.html",
];
// Promise wrapper around request(): resolves once the body has been saved.
function getPage(url) {
return new Promise((resolve, reject) => {
console.log(` Downloading page ${url}`);
request({ url: `${url}` }, (err, res, body) => {
// Stop after rejecting; without `return`, the code below still runs.
if (err) return reject(err);
console.log(` Writing html to file`);
// NOTE(review): `${url}.html` still contains slashes, which most file
// systems reject in a filename — consider sanitizing as in the other
// answer.
fs.writeFile(`${url}.html`, `${body}`, (writeErr) => {
if (writeErr) return reject(writeErr);
console.log("saved");
resolve();
});
});
});
}
// Run all downloads in parallel and settle when every page is saved.
// Improved: Promise.all already returns a promise, so wrapping it in a
// `new Promise` (the explicit-construction anti-pattern) is unnecessary.
const main = async () => {
return Promise.all(list.map((path) => getPage(path)));
};
main().catch(console.error);
GetPage() is not built around promises and doesn't even return a promise so await on its result does NOTHING. await has no magic powers. It awaits a promise. If you don't give it a promise that properly resolves/rejects when your async operation is done, then the await does nothing. Your GetPage() function returns nothing so the await has nothing to do.
What you need is to fix GetPage() so it returns a promise that is properly tied to your asynchronous result. Because the request() library has been deprecated and is no longer recommended for new projects and because you need a promise-based solution anyway so you can use await with it, I'd suggest you switch to one of the alternative promise-based libraries recommended here. My favorite from that list is got(), but you can choose whichever one you like best. In addition, you can use fs.promises.writeFile() for promise-based file writing.
Here's how that code would look like using got():
const got = require('got');
const { URL } = require('url');
const path = require('path');
const fs = require('fs');
// Download one URL with got() and save it under a sanitized local filename.
function getPage(url) {
console.log(` Downloading page ${url}`);
return got(url).text().then(data => {
// can't just use an URL for your filename as it contains potentially illegal
// characters for the file system
// so, add some code to create a sanitized filename here
// find just the root filename in the URL
let urlObj = new URL(url);
let filename = path.basename(urlObj.pathname);
if (!filename) {
// URL path ended in "/" — fall back to a default name.
filename = "index.html";
}
let extension = path.extname(filename);
if (!extension) {
filename += ".html";
} else if (extension === ".") {
filename += "html";
}
// Fixed: the template placeholder was garbled ("$(unknown)") — log the
// sanitized filename actually being written.
console.log(` Writing file ${filename}`)
return fs.promises.writeFile(filename, data);
});
}
const list = ['https://www.someurl.com/page1.html', 'https://www.someurl.com/page2.html', 'https://www.someurl.com/page3.html'];
// Download each page in order; the await makes the loop fully sequential.
async function main() {
  for (let i = 0; i < list.length; i += 1) {
    const pageUrl = list[i];
    console.log(` processing ${pageUrl}`);
    await getPage(pageUrl);
  }
}
main()
  .then(() => console.log("all done"))
  .catch(console.error);
If you put real URLs in the array, this is directly runnable in nodejs. I ran it myself with my own URLs.
Summary of Changes and Improvements:
Switched from request() to got() because it's promise-based and not deprecated.
Modified getPage() to return a promise that represents the asynchronous operations in the function.
Switched to fs.promises.writeFile() so we are using only promises for asynchronous control-flow.
Added legal filename generation from the base path of the URL since you can't just use a full URL as a filename (at least in some file systems).
Switched to simpler for/of loop
I created some functions containing MongoDB methods in one File. It works well when I access the function from the same file, but when I am trying to access the function from another file, it doesn't work.
Here is the code
const Chain = require('../database/models/chains')
// Count documents in the Chain collection; throws if fewer than two exist.
const getlatestChain = async () => {
try {
const thechains = await Chain.countDocuments()
if (thechains < 2) {
throw new Error('there is only one chain!')
}
return thechains
} catch (error) {
// NOTE(review): returning the error (instead of rethrowing) makes the
// promise RESOLVE with an Error object — the caller's .then receives it
// and .catch never fires.
return error
}
}
module.exports = {
getlatestChain: getlatestChain
}
It doesn't work when I call it from another file
const thechain = require('../src/utils/chain')
// Loaded for its side effects only (presumably establishing the DB
// connection) — TODO confirm it connects before the query below runs.
require('../src/database/database')
thechain.getlatestChain()
.then((result) => {
console.log('how many documents : ' + result)
}).catch((err) => {
console.log(err)
});
error
TypeError: Chain.countDocuments is not a function
Check the chains model to make sure you are exporting the countDocuments function; also check the spelling if it is exported.
In the docs it shows two versions of readdir and stat. Both of which have an async and sync version readir/readdirSync and stat/statSync.
Because readdir and stat are async, I would expect them to return a Promise, but when trying to use async/await the script doesn't wait for readdir to resolve, and if I use .then/.catch I get an error: cannot read .then of undefined.
All I'm trying to do here is map the directories that exist inside of the directory the script is being ran inside of to the dirsOfCurrentDir map.
Returns error cannot read .then of undefined
const fs = require('fs');
const directory = `${ __dirname }/${ process.argv[2] }`;
const dirsOfCurrentDir = new Map();
// NOTE(review): callback-style fs.readdir returns undefined, so chaining
// .then onto it throws "cannot read .then of undefined".
fs.readdir(directory, (err, files) => {
let path;
if (err)
return console.log(err);
files.forEach(file => {
path = directory + file;
fs.stat(path, (err, stats) => {
if (err)
return console.log(err);
dirsOfCurrentDir.set(file, directory);
});
});
}).then(() => console.log('adasdasd'))
console.log(dirsOfCurrentDir)
Returns Map {}
// NOTE(review): awaiting fs.readdir's undefined return value does not wait
// for its callback, so the map is still empty when logged below.
const foo = async () => {
await fs.readdir(directory, (err, files) => {
let path;
if (err)
return console.log(err);
files.forEach(file => {
path = directory + file;
fs.stat(path, (err, stats) => {
if (err)
return console.log(err);
dirsOfCurrentDir.set(file, directory);
});
});
});
};
foo()
console.log(dirsOfCurrentDir)
Edit
I ended up going with the synchronous versions of both of these functions readdirSync and statSync. While I would feel better using the async methods or promisify I still have not figured out how to get my code working correctly using either.
const fs = require('fs');
const directory = `${ __dirname }/${ process.argv[2] }`;
const dirsOfCurrentDir = new Map();
// Synchronous variants: each call blocks until it finishes, so the map is
// fully populated by the time the final console.log runs.
for (const entry of fs.readdirSync(directory)) {
  const entryPath = directory + entry;
  const stats = fs.statSync(entryPath);
  if (stats.isDirectory()) {
    dirsOfCurrentDir.set(entry, entryPath);
  }
}
console.log(dirsOfCurrentDir); // logs out the map with all properties set
Because readdir and stat are async, I would expect them to return a Promise.
First off, make sure you know the difference between an asynchronous function and an async function. A function declared as async using that specific keyword in Javascript such as:
async function foo() {
...
}
does always return a promise (per the definition of a function declared with the async keyword).
But an asynchronous function such as fs.readdir() may or may not return a promise, depending upon its internal design. In this particular case, the original implementation of the fs module in node.js only uses callbacks, not promises (its design predates the existence of promises in node.js). Its functions are asynchronous, but not declared as async and thus it uses regular callbacks, not promises.
So, you have to either use the callbacks or "promisify" the interface to convert it into something that returns a promise so you can use await with it.
There is an experimental interface in node.js v10 that offers built-in promises for the fs module.
const fsp = require('fs').promises;
fsp.readdir(...).then(...)
There are lots of options for promisifying functions in an earlier version of node.js. You can do it function by function using util.promisify():
// Wrap the callback-style fs functions so they return promises.
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
Since I'm not yet developing on node v10, I often use the Bluebird promise library and promisify the whole fs library at once:
const Promise = require('bluebird');
const fs = Promise.promisifyAll(require('fs'));
fs.readdirAsync(...).then(...)
To just list the sub-directories in a given directory, you could do this:
const fs = require('fs');
const path = require('path');
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
const root = path.join(__dirname, process.argv[2]);
// utility function for sequencing through an array asynchronously
// Run fn(item) for each element of arr strictly one after another,
// returning a promise for the completion of the whole chain (resolved
// with the value of the final fn call).
function sequence(arr, fn) {
  let chain = Promise.resolve();
  for (const item of arr) {
    chain = chain.then(() => fn(item));
  }
  return chain;
}
// Resolve with a Map of subdirectory name -> parent directory for rootDir,
// stat-ing entries one at a time via sequence().
async function listDirs(rootDir) {
  const dirsOfCurrentDir = new Map();
  const files = await readdirP(rootDir);
  await sequence(files, async (f) => {
    const fullPath = path.join(rootDir, f);
    const stats = await statP(fullPath);
    if (stats.isDirectory()) {
      dirsOfCurrentDir.set(f, rootDir);
    }
  });
  return dirsOfCurrentDir;
}
// Print each subdirectory name. Note there is no .catch on this chain, so
// a readdir/stat failure becomes an unhandled rejection.
listDirs(root).then(m => {
for (let [f, dir] of m) {
console.log(f);
}
});
Here's a more general implementation that lists files and offers several options for both what to list and how to present the results:
const fs = require('fs');
const path = require('path');
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
const root = path.join(__dirname, process.argv[2]);
// options takes the following:
// recurse: true | false - set to true if you want to recurse into directories (default false)
// includeDirs: true | false - set to true if you want directory names in the array of results
// sort: true | false - set to true if you want filenames sorted in alpha order
// results: can have any one of the following values
// "arrayOfFilePaths" - return an array of full file path strings for files only (no directories included in results)
// "arrayOfObjects" - return an array of objects {filename: "foo.html", rootdir: "//root/whatever", full: "//root/whatever/foo.html"}
// results are breadth first
// utility function for sequencing through an array asynchronously
// Chain fn(item) calls so they run one at a time, in array order; the
// returned promise settles when the last call finishes.
function sequence(arr, fn) {
  let tail = Promise.resolve();
  for (const item of arr) {
    tail = tail.then(() => fn(item));
  }
  return tail;
}
// General directory lister (options documented in the block above).
// Resolves with either an array of full paths or an array of entry objects;
// ordering is breadth first because directories are deferred until after
// the files at each level.
function listFiles(rootDir, opts = {}, results = []) {
let options = Object.assign({recurse: false, results: "arrayOfFilePaths", includeDirs: false, sort: false}, opts);
// Inner worker: appends entries for one directory level to `results`.
function runFiles(rootDir, options, results) {
return readdirP(rootDir).then(files => {
let localDirs = [];
if (options.sort) {
files.sort();
}
// sequence() keeps the stat calls strictly ordered so results are
// deterministic per level.
return sequence(files, fname => {
let fullPath = path.join(rootDir, fname);
return statP(fullPath).then(stats => {
// if directory, save it until after the files so the resulting array is breadth first
if (stats.isDirectory()) {
localDirs.push({name: fname, root: rootDir, full: fullPath, isDir: true});
} else {
results.push({name: fname, root: rootDir, full: fullPath, isDir: false});
}
});
}).then(() => {
// now process directories
if (options.recurse) {
return sequence(localDirs, obj => {
// add directory to results in place right before its files
if (options.includeDirs) {
results.push(obj);
}
return runFiles(obj.full, options, results);
});
} else {
// add directories to the results (after all files)
if (options.includeDirs) {
results.push(...localDirs);
}
}
});
});
}
return runFiles(rootDir, options, results).then(() => {
// post process results based on options
if (options.results === "arrayOfFilePaths") {
return results.map(item => item.full);
} else {
return results;
}
});
}
// get flat array of file paths,
// recursing into directories,
// each directory sorted separately
// Example invocation; .catch handles readdir/stat failures.
listFiles(root, {recurse: true, results: "arrayOfFilePaths", sort: true, includeDirs: false}).then(list => {
for (const f of list) {
console.log(f);
}
}).catch(err => {
console.log(err);
});
You can copy this code into a file and run it, passing . as an argument to list the directory of the script or any subdirectory name you want to list.
If you wanted fewer options (such as no recursion or directory order not preserved), this code could be reduced significantly and perhaps made a little faster (run some async operations in parallel).