Node.js asynchronous functions not working - node.js

I have a function (let's call it f1) that calls another function (let's call it f2) inside it. The issue is that f2 is supposed to return a parsed JSON file's contents, but it always returns undefined.
// Lists the .json files in jsonFolder and parses each one via f2().
// NOTE(review): as posted, this has two problems —
//  1. f2() below never resolves to the parsed JSON (its `return` happens
//     inside a callback), so `await f2(filePath)` is always undefined;
//  2. `templateJSON` is declared with `const` inside the for-loop body,
//     so the final console.log(templateJSON) after the loop references
//     an undeclared name.
async function f1(){
var customerDataArray = []
// Synchronously list the folder and keep only the .json entries.
fs.readdirSync(jsonFolder).forEach(file => {
var temp = path.extname(file)
if (temp == '.json'){
customerDataArray.push(file)
}
});
let templateFiles = customerDataArray
console.log(templateFiles)
for(let tfc=0; tfc < templateFiles.length ; tfc++){
let fileName = templateFiles[tfc]
let filePath = './fashion/test/' + fileName
const templateJSON = await f2(filePath) // awaits undefined — see f2 below
console.log(templateJSON)
}
console.log(templateJSON) // out of scope here — templateJSON lives only inside the loop
}
// Intended to read filePath and return the parsed JSON.
// NOTE(review): this is the bug the question asks about — `return customer`
// returns from the fs.readFile *callback*, not from f2 itself. f2 contains
// no top-level return, so the async function always resolves to undefined.
async function f2(filePath) {
fs.readFile(filePath, "utf8", (err, jsonString) => {
if (err) {
console.log("Error reading file from disk:", err);
return;
}
try {
const customer = JSON.parse(jsonString);
return customer // returns to the callback's caller, which ignores it
} catch (err) {
console.log("Error parsing JSON string:", err);
}
});
}
You can ignore the other details of the file. I am able to display the parsed file in f2 using console.log(customer). But when it is returned to f1, templateJSON is undefined.
Any help is appreciated!

You can't mix and match callback-based async with promise-based (async/await) async.
Generally, you can promisify callback-based functions, but for fs, there's fs/promises already (since Node 10).
Your code can thus be simplified to something like this. It won't read the files in parallel, though; if you had very many files, it'd be easy to cause "Too many open files" without adding something like p-queue or p-limit into the mix.
const fsp = require("fs/promises");
/**
 * Lists the .json files in jsonFolder and logs the parsed contents of each,
 * reading the files sequentially (one await per file).
 */
async function f1() {
  const files = await fsp.readdir(jsonFolder);
  // Keep only the .json entries.
  const customerDataArray = files.filter((f) => path.extname(f) === ".json");
  for (const fileName of customerDataArray) {
    // BUG FIX: the original indexed `templateFiles[i]`, a name that does not
    // exist in this rewrite — iterate the filtered array instead.
    const filePath = "./fashion/test/" + fileName;
    const templateJSON = await f2(filePath);
    console.log(templateJSON);
  }
}
// Resolves with the parsed contents of the JSON file at filePath.
// Any read or parse error rejects the returned promise.
async function f2(filePath) {
  const rawText = await fsp.readFile(filePath, "utf8");
  const parsed = JSON.parse(rawText);
  return parsed;
}

Related

Async function returns pending promise node js

This is a very easy question but no google search results return the correct answer.
const express = require("express");
const app = express();
const cors = require("cors");
const pool = require("./db");
const poolec2 = require("./db");
require("./function")();
// Queries the mappings table and resolves with the result rows.
// NOTE(review): assigned without const/let — an implicit global.
returnMappings = async function(connection){
try {
let mapping = await connection.query("SELECT ticker FROM mappings");
let results = await mapping.rows; // `rows` is a plain array; awaiting it is a no-op
//console.log(results);
return results;
} catch (err) {
// Error is logged and swallowed; the promise then resolves to undefined.
console.error(err.message);
}
};
// This is the question's bug: returnMappings is async, so `mappings` here is
// a pending Promise, not the rows — hence console.log prints a promise.
const mappings = returnMappings(poolec2);
console.log(mappings);
What am I missing here that is not returning my data? When I unquote console.log(results); I can see the desired results in my terminal. I've tried various versions of using .then but have not had any success return results.then;, const mappings = returnMappings(poolec2).then;, console.log(mappings.then);. I've also tried returning my results outside of my try catch with no luck. I'm really stuck trying to understand how I go about returning a value from an async function.
EDIT
The goal is to pass the results from the above async function to another function to check whether a user-inputted value exists in that vector of mappings. So, using indexOf on the user input, I then use if/else to return true or false, with the final result being either true or false.
// Checks whether `input` occurs in `string` (the mappings array).
// NOTE(review): `checkMappings`, `stringArray`, and `value` are all implicit
// globals (no const/let). Also, this returns false when the input IS found
// (value > -1) — presumably inverted relative to the stated goal; the
// SOLUTION version below flips it. TODO confirm intent.
checkMappings = function(string,input){
stringArray = string;
value = stringArray.indexOf(input);
if(value > -1){
return false
}else{
return true
}
};
SOLUTION
/**
 * Fetches all ticker rows from mappings_production.
 * @param {{query: function(string): Promise<{rows: Array}>}} connection - pg pool/client.
 * @param {*} input - unused; kept for call-site compatibility.
 * @returns {Promise<Array|undefined>} the result rows, or undefined when the
 *   query fails (the error is logged, not rethrown — preserved behavior).
 */
const returnMappings = async function(connection,input){
  try {
    const mapping = await connection.query("SELECT ticker FROM mappings_production");
    // FIX: `rows` is a plain array, not a promise — the original's
    // `await mapping.rows` was a no-op; also declare the binding with const
    // instead of leaking an implicit global.
    return mapping.rows;
  } catch (err) {
    // Deliberate best-effort: log and let the caller receive undefined.
    console.error(err.message);
  }
};
/**
 * Returns true when `input` occurs in the JSON-stringified mappings.
 * @param {*} string - the rows returned by returnMappings().
 * @param {string} input - ticker symbol to look for.
 * @returns {boolean} true if found, false otherwise.
 */
const checkMappings = function(string,input){
  const stringArray = JSON.stringify(string);
  // BUG FIX: the original read `stringArrayC1`, an undeclared name, which
  // threw a ReferenceError at runtime. Use the variable declared above.
  const value = stringArray.indexOf(input);
  const found = value > -1;
  console.log(found); // preserved: the original logged the result before returning
  return found;
};
// NOTE(review): resMappingCheck is a Promise (the .then() result), so the
// console.log below prints a pending promise; the boolean is only available
// inside a .then() callback or after an await.
const resMappingCheck = returnMappings(poolec2).then((mappings) => checkMappings(mappings,"AAPL"));
console.log(resMappingCheck);
this worked for what I needed to do
As others have pointed out, await can only be used in an async function, but using .then() is functionally equivalent.
This syntax that should work for you:
returnMappings(poolec2).then((mappings) => console.log(mappings));
if you want to do something more elaborate / multi-line, you can use curly braces like so:
returnMappings(poolec2).then((mappings) => {
console.log(mappings)
});
UPDATE:
If you want to chain two functions together, then you'll need to start with the .then() pattern: you can declare the callback function in .then() to be asynchronous. At that point, you can start to use await like you're used to.
I'm not sure what relationship you're trying to create between returnMappings() and checkMappings(), but you can chain them together like this: (note the use of async on the first line to allow the use of await inside the callback.)
returnMappings('test').then(async (mapping) => {
const checkResult = await checkMappings(mapping)
console.log(`checkMapping result: ${checkResult}`)
}).catch((err) => console.log(`Error: ${err}`))
Try this:
const mappings = await returnMappings(poolec2);
That will work if you wrap the code inside an async function. Or you could do:
let mappings;
returnMappings(poolec2).then(res => {
mappings = res;
});

Break from async function in await call

Okay, so I'm working with data from Memcache using a promise based library but the issue I'm having is I don't know a way to break from the async call if a result is found?
The code I'm working with is:
// Emits an 'aye' reply carrying a fresh uuid.
const _pong = function() {
return socket.emit('aye', {
pong: globals.uuid()
});
};
// 'helo' handler: pong only when a cached ping entry exists for the uuid.
// NOTE(review): this mixes await with .then — _pong() runs inside the .then
// callback, and the `return`-less callback cannot stop the outer handler,
// which is why execution continues past this point (the question's issue).
return socket.on('helo', async function(data) {
socket._uuid = data.uuid;
let key = 'ws-ping:' + data.uuid;
await cache.get(key).then((result) => {
if(result !== undefined) {
_pong();
}
});
......
});
I basically need to just ignore the rest of the socket.on function if a result is found using the given key? but it seems to continue?
Because you're using await, you can ditch the .then, and get the result directly, in the same block - if the result exists, then just return (after _ponging, if that's the logic you're looking for):
return socket.on('helo', async function(data) {
socket._uuid = data.uuid;
let key = 'ws-ping:' + data.uuid;
const result = await cache.get(key);
if (result !== undefined) {
_pong();
return;
}
// ...
});

S3 file upload issue using nodejs lambda

I tried to upload an image to an AWS S3 bucket using a Node.js lambda function. But on the initial call no files get uploaded, and on the next attempt the previous file gets uploaded.
Also, it does not run sequentially, even when used inside async/await.
// Uploads each base64-encoded attachment to S3 under `${id}/<timestamp>_<n>.<ext>`.
// NOTE(review): Array.prototype.forEach ignores the promises returned by an
// async callback, so uploadAttachment resolves before any upload finishes —
// this is why the first invocation appears to upload nothing (the question's
// symptom). Also, `await s3.upload(params, () => {})` awaits the SDK's
// request object, not a completion promise — see the answers below.
async uploadAttachment(attachment, id) {
try {
let res = '';
attachment.forEach(async (element, index) => {
const encodedImage = element.base64;
const fileTypeInfo = element.fileextType;
// Unique-ish name: epoch seconds plus the 1-based attachment index.
const fileName = `${Math.floor(new Date() / 1000)}_${index + 1}.${fileTypeInfo}`;
const decodedImage = Buffer.from(encodedImage, 'base64');
const filePath = `${id}/${fileName}`;
const params = {
Body: decodedImage,
Bucket: process.env.S3_FRF_BUCKET,
Key: filePath
};
res = await s3.upload(params, () => {});
});
return res; // returns before the forEach callbacks have completed
} catch (e) {
throw e; // no-op try/catch: rethrows without adding anything
}
}
Any suggestions ?
.forEach on a regular array does not work with async/await like one would expect. Use a for..of instead.
// Unlike forEach, for...of runs in the surrounding async function's context,
// so an await in the body genuinely suspends the loop between iterations.
for(let element of attachment) {
// await actually waits here
}
You need for loop as suggested before as well as adding promise in s3.upload
await s3.upload(params, () => {}).promise();

NodeJS: Confusion about async "readdir" and "stat"

In the docs it shows two versions of readdir and stat. Both of which have an async and a sync version: readdir/readdirSync and stat/statSync.
Because readdir and stat are async I would expect them to return a Promise, but when trying to use async/await the script doesn't wait for readdir to resolve, and if I use .then/.catch I get an error "cannot read .then of undefined".
All I'm trying to do here is map the directories that exist inside of the directory the script is being ran inside of to the dirsOfCurrentDir map.
Returns error cannot read .then of undefined
const fs = require('fs');
const directory = `${ __dirname }/${ process.argv[2] }`;
const dirsOfCurrentDir = new Map();
// Callback-style fs.readdir returns undefined (results arrive via the
// callback), which is why chaining .then onto it throws
// "cannot read .then of undefined".
fs.readdir(directory, (err, files) => {
let path;
if (err)
return console.log(err);
files.forEach(file => {
path = directory + file;
fs.stat(path, (err, stats) => {
if (err)
return console.log(err);
dirsOfCurrentDir.set(file, directory);
});
});
}).then(() => console.log('adasdasd'))
// Runs before any readdir/stat callback has fired, so the map is empty here.
console.log(dirsOfCurrentDir)
Returns Map {}
// Second attempt: awaiting the callback-style fs.readdir awaits undefined,
// which resolves immediately — the await provides no synchronization, so
// the map is still empty when logged below.
const foo = async () => {
await fs.readdir(directory, (err, files) => {
let path;
if (err)
return console.log(err);
files.forEach(file => {
path = directory + file;
fs.stat(path, (err, stats) => {
if (err)
return console.log(err);
dirsOfCurrentDir.set(file, directory);
});
});
});
};
foo()
// foo() is not awaited (and awaiting it would not help — see above).
console.log(dirsOfCurrentDir)
Edit
I ended up going with the synchronous versions of both of these functions readdirSync and statSync. While I would feel better using the async methods or promisify I still have not figured out how to get my code working correctly using either.
// Working synchronous version: every call completes before the next line
// runs, so the map is fully populated by the time it is logged.
const fs = require('fs');
const directory = `${ __dirname }/${ process.argv[2] }`;
const dirsOfCurrentDir = new Map();
const dirContents = fs.readdirSync(directory);
dirContents.forEach(file => {
const path = directory + file;
const stats = fs.statSync(path);
if (stats.isDirectory())
dirsOfCurrentDir.set(file, path);
});
console.log(dirsOfCurrentDir); // logs out the map with all properties set
Because readidir and stat are async I would expect them to return a Promise
First off, make sure you know the difference between an asynchronous function and an async function. A function declared as async using that specific keyword in Javascript such as:
// A function declared with the `async` keyword always returns a Promise.
async function foo() {
...
}
does always return a promise (per the definition of a function declared with the async keyword).
But an asynchronous function such as fs.readdir() may or may not return a promise, depending upon its internal design. In this particular case, the original implementation of the fs module in node.js only uses callbacks, not promises (its design predates the existence of promises in node.js). Its functions are asynchronous, but not declared as async and thus it uses regular callbacks, not promises.
So, you have to either use the callbacks or "promisify" the interface to convert it into something that returns a promise so you can use await with it.
There is an experimental interface in node.js v10 that offers built-in promises for the fs module.
const fsp = require('fs').promises;
fsp.readdir(...).then(...)
There are lots of options for promisifying functions in an earlier version of node.js. You can do it function by function using util.promisify():
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
Since I'm not yet developing on node v10, I often use the Bluebird promise library and promisify the whole fs library at once:
const Promise = require('bluebird');
const fs = Promise.promisifyAll(require('fs'));
fs.readdirAsync(...).then(...)
To just list the sub-directories in a given directory, you could do this:
const fs = require('fs');
const path = require('path');
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
const root = path.join(__dirname, process.argv[2]);
// Runs fn(item) over arr strictly one at a time (each call starts only after
// the previous one settles). Resolves with the result of the final fn call,
// or undefined for an empty array; rejects on the first failure.
async function sequence(arr, fn) {
  let last;
  for (const item of arr) {
    last = await fn(item);
  }
  return last;
}
function listDirs(rootDir) {
const dirsOfCurrentDir = new Map();
return readdirP(rootDir).then(files => {
return sequence(files, f => {
let fullPath = path.join(rootDir, f);
return statP(fullPath).then(stats => {
if (stats.isDirectory()) {
dirsOfCurrentDir.set(f, rootDir)
}
});
});
}).then(() => {
return dirsOfCurrentDir;
});
}
// Print each subdirectory name once the Map has been built.
listDirs(root).then(m => {
for (let [f, dir] of m) {
console.log(f);
}
});
Here's a more general implementation that lists files and offers several options for both what to list and how to present the results:
const fs = require('fs');
const path = require('path');
const promisify = require('util').promisify;
const readdirP = promisify(fs.readdir);
const statP = promisify(fs.stat);
const root = path.join(__dirname, process.argv[2]);
// options takes the following:
// recurse: true | false - set to true if you want to recurse into directories (default false)
// includeDirs: true | false - set to true if you want directory names in the array of results
// sort: true | false - set to true if you want filenames sorted in alpha order
// results: can have any one of the following values
// "arrayOfFilePaths" - return an array of full file path strings for files only (no directories included in results)
// "arrayOfObjects" - return an array of objects {filename: "foo.html", rootdir: "//root/whatever", full: "//root/whatever/foo.html"}
// results are breadth first
// Processes arr serially: fn(item) is invoked only after the previous
// item's promise settles. Resolves with the last fn result (undefined for
// an empty array); rejects as soon as any fn call fails.
async function sequence(arr, fn) {
  let last;
  for (const item of arr) {
    last = await fn(item);
  }
  return last;
}
/**
 * Recursively (optionally) lists files under rootDir, breadth first.
 * @param {string} rootDir - directory to start from.
 * @param {object} opts - see the option comments above this function.
 * @param {Array} results - accumulator; callers normally omit it.
 * @returns {Promise<Array>} file path strings or descriptor objects,
 *   depending on options.results.
 */
function listFiles(rootDir, opts = {}, results = []) {
let options = Object.assign({recurse: false, results: "arrayOfFilePaths", includeDirs: false, sort: false}, opts);
// Inner worker so the options object is normalized exactly once.
function runFiles(rootDir, options, results) {
return readdirP(rootDir).then(files => {
let localDirs = [];
if (options.sort) {
files.sort();
}
return sequence(files, fname => {
let fullPath = path.join(rootDir, fname);
return statP(fullPath).then(stats => {
// if directory, save it until after the files so the resulting array is breadth first
if (stats.isDirectory()) {
localDirs.push({name: fname, root: rootDir, full: fullPath, isDir: true});
} else {
results.push({name: fname, root: rootDir, full: fullPath, isDir: false});
}
});
}).then(() => {
// now process directories
if (options.recurse) {
return sequence(localDirs, obj => {
// add directory to results in place right before its files
if (options.includeDirs) {
results.push(obj);
}
return runFiles(obj.full, options, results);
});
} else {
// add directories to the results (after all files)
if (options.includeDirs) {
results.push(...localDirs);
}
}
});
});
}
return runFiles(rootDir, options, results).then(() => {
// post process results based on options
if (options.results === "arrayOfFilePaths") {
return results.map(item => item.full);
} else {
return results;
}
});
}
// get flat array of file paths,
// recursing into directories,
// each directory sorted separately
// Example invocation: flat, sorted, recursive listing of file paths only.
listFiles(root, {recurse: true, results: "arrayOfFilePaths", sort: true, includeDirs: false}).then(list => {
for (const f of list) {
console.log(f);
}
}).catch(err => {
console.log(err);
});
You can copy this code into a file and run it, passing . as an argument to list the directory of the script or any subdirectory name you want to list.
If you wanted fewer options (such as no recursion or directory order not preserved), this code could be reduced significantly and perhaps made a little faster (run some async operations in parallel).

asynchronous counting totals

I have a gulp task that loops through a set of files and validates each one. At the end, I want to report the total number of valid files.
const fs = require('fs')
const path = require('path')
const gulp = require('gulp')
// Gulp task that validates a set of JSON files and reports a total.
// NOTE(review): fs.readFile is asynchronous, so the final console.log runs
// before any callback has incremented `count` — this is why the question
// sees "Total valid files: 0" printed first.
gulp.task('validate', function (callback) {
function validate(json) { /*...*/ }
let files = ['file1.json', 'file2.json', 'file3.json']
let count = 0
files.forEach(function (file) {
fs.readFile(path.join(__dirname, file), 'utf8', function (err, data) {
let is_valid = validate(JSON.parse(data))
if (!is_valid) {
console.error(`Invalid! ${file}`)
} else {
console.log(`Valid: ${file}`)
count++
}
})
})
console.log(`Total valid files: ${count}`) // runs before the callbacks above
})
When running this, my console reports:
Total valid files: 0
Valid schema: file1.json
Valid schema: file2.json
Valid schema: file3.json
I suspect the count is reported before the files are read, due to the asynchronous nature of fs.readFile(). When I try logging the total count before .forEach() is called, I get the same result (as expected). How do I log a global variable after an asynchronous function?
Note: I know the gulp convention is to pass a callback argument to the gulp task, but gulp calls this task with a default callback function that can only be passed an Error. Can I somehow create my own custom callback function, to which I could pass count?
The problem is that 'fs' library is old and uses only callbacks.
The solution I recommend is to promisify the fs.readFile() method and add those promises in array and promise.all them.
Side note: use for...of instead of forEach when you use async functions.
const { promisify } = require('util');
const fs = require('fs');
const readFileAsync = promisify(fs.readFile);
const path = require('path');
let files = ['file1.json', 'file2.json', 'file3.json'];
let count = 0
let tasks = [];
function validate(json) { /*...*/ }
// Kick off one promisified read per file, counting the valid ones.
for (let file of files) {
tasks.push(readFileAsync(path.join(__dirname, file), 'utf8')
.then(data => {
let is_valid = validate(JSON.parse(data))
if (!is_valid) {
console.error(`Invalid! ${file}`)
} else {
console.log(`Valid: ${file}`)
count++
}
})
// Per-file errors are logged here, so Promise.all below still resolves.
.catch(err => console.log(err))
);
}
// The total is only printed once every read-and-validate task has settled.
Promise.all(tasks)
.then(res => console.log(`Total valid files: ${count}`))
.catch(err => console.log(err));

Resources