I want to know how to use setImmediate with async/await and handle errors properly. I have written the following code, but I am not sure it is adhering to best practices.
There is a route in my Express app:
router.get('/parseinvoice', async (req, res, next) => {
try {
const parsedInvoiceResponse = await userhelper.getParseInVoiceList();
res.json({parsedInvoiceResponse})
} catch (error) {
res.json({});
}
});
The userhelper module code:
var userhelper = {};
const fs = require('fs'),
path = require('path'),
filePath = path.join(__dirname, './input_user_story_12.txt');
const { promisify } = require('util')
const readFile = promisify(fs.readFile);
userhelper.getParseInVoiceList = async function() {
return new Promise( async (resolve, reject) => {
try {
setImmediate(async function() {
try {
const contents = await readFile(filePath, 'UTF-8');
resolve(contents);
} catch (error) {
reject(error);
}
});
} catch (error) {
reject(error);
}
});
}
module.exports = userhelper;
Although I am getting the response, I am not sure about the setImmediate part or whether the multiple try/catch blocks are required. Is there a neater way to write the code below? These are the variants I am considering:
1.
try {
setImmediate(async ()=>{
var res = await readFile(filePath, 'UTF-8');
})
} catch(err) {
}
2.
await setImmediate(()=>{
var res = await readFile(filePath, 'UTF-8');
}).catch(){}
3.
try {
await setImmediate(()=>{
await readFile(filePath, 'UTF-8');
}).catch(){}
} catch() {
}
4. This one should return the result into res:
const res = await setImmediate(()=>{
return readFile(filePath, 'UTF-8');
})
Why are you not just using this?
userhelper.getParseInVoiceList = async function() {
return await readFile(filePath, 'UTF-8');
}
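As a side note, the Express route in the question silently swallows failures by responding with an empty object. A minimal sketch (same route shape, only the catch changes) that forwards the error to Express's error-handling middleware instead:
router.get('/parseinvoice', async (req, res, next) => {
  try {
    const parsedInvoiceResponse = await userhelper.getParseInVoiceList();
    res.json({ parsedInvoiceResponse });
  } catch (error) {
    // Let the centralized Express error handler produce the response
    next(error);
  }
});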
Expanding on #Dan D.'s answer, you can await the resolution of an asynchronous setImmediate prior to calling the asynchronous promisified readFile, but I am not sure why you would need to do this without more context.
userhelper.getParseInVoiceList = async function() {
await new Promise((resolve) => setImmediate(() => resolve()));
return await readFile(filePath, 'UTF-8');
}
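For what it's worth, Node 15+ also ships a promise-based setImmediate in the timers/promises module, which removes the manual Promise wrapper entirely; a minimal sketch assuming that module is available:
const { setImmediate } = require('timers/promises');
userhelper.getParseInVoiceList = async function() {
  // Yield to the event loop once, then read the file
  await setImmediate();
  return readFile(filePath, 'UTF-8');
};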
I have a Lambda function with the structure below.
It used to work in older versions of Node.js, but it doesn't work with the newer versions.
I know my code structure is quite messy and wrong, but I can't get my head around it. I'm trying to use Promise.all, but I'm obviously doing something wrong because it's not getting executed at all.
By the way, I'm not getting any errors; the Promise.all call never gets executed.
let AWS = require('aws-sdk');
exports.handler = async(event, context, callback) => {
let result = {};
try {
result = await getOrder(sql, 0);
result.map(
(dataField) => {
});
}
catch (error) {
console.log(error);
callback(error);
}
var today_result = [];
const groupKey = i => i.user_id + '_' + i.when;
const counts = _.countBy(followingsIDs, groupKey);
const isMulti = i => counts[groupKey(i)] > 1;
const multiPropkey = i => ({ multiplekey: isMulti(i) ? groupKey(i) : groupKey(i) });
const multiProp = i => ({ multiple: isMulti(i) ? counts[groupKey(i)] : 1 });
const updated = _.map(followingsIDs, i => _.extend(i, multiProp(i), multiPropkey(i)));
const uniqResult = _.uniq(updated, function(d) { return d.multiplekey });
// Doesn’t execute from here —>
await Promise.all(uniqResult.map(async(dataField) => {
console.log("test_");
dosomething()
if (true) {
let sql = `INSERT INTO ….`
result = await getOrder(sql, 0);
try {
const data = await sns.publish(params).promise();
}
catch (e) {
console.log(e.stack);
response.result = 'Error';
}
}
}));
// Till here <----
callback(null, uniqResult);
};
let getOrder = async(sql, params) => {
return new Promise((resolve, reject) => {
pool.getConnection((err, connection) => {
if (err) throw err;
connection.query(sql, params, (err, results) => {
if (err) {
reject(err);
}
// console.log("-----Query Done!");
connection.release();
// console.log("-----Data: ", results);
resolve(results);
});
});
});
};
What are you awaiting? The uniqResult is just declared as an empty array, and immediately after that you pass it to Promise.all. You need to fill it with Promises and then pass that array to Promise.all.
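In other words, Promise.all only waits for promises that are actually in the array you give it. A minimal sketch of the pattern being described, using the question's getOrder as a stand-in for the real work:
// Build an array of promises first, then await them all
const tasks = uniqResult.map(async (dataField) => {
  const rows = await getOrder('INSERT INTO ...', 0); // placeholder SQL
  return rows;
});
const results = await Promise.all(tasks); // rejects if any task rejects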
Can anyone help me implement MS SQL transactions in Node.js? I am trying to execute multiple stored procedures inside a promise.
Method 1
const executeProcedure = async (data1, data2) => {
try {
// sql connection
let dbConn = new sql.ConnectionPool(config);
await dbConn.connect();
let transaction = new sql.Transaction(dbConn);
await transaction.begin().then(async()=> {
// create transaction
// begin tran
let result = await insertOperation(transaction, data1);
let result2 = await updateOperation(transaction, data2);
let result1 = await Promise.all([result, result2]);
await transaction.commit();
dbConn.close();
}).catch(async(err)=> {
await transaction.rollback();
dbConn.close();
throw err;
});
return {};
}
catch (error) {
throw(error);
}
}
Method 2
const insertOperation = async (transaction,data1) => {
return new Promise((resolve, reject) => {
try {
var request = new sql.Request(transaction);
request.input('data1' , sql.NVarChar(40) , data1)
.execute('dbo.insertOperation').then((recordSet) => {
resolve(recordSet.recordsets);
}).catch((err) => {
reject(err);
});
}
catch (error) {
reject(error);
}
});
}
Method 3
const updateOperation = async (transaction,data2) => {
return new Promise((resolve, reject) => {
try {
var request = new sql.Request(transaction);
request.input('data2' , sql.NVarChar(40) , data2)
.execute('dbo.updateOperation').then((recordSet) => {
resolve(recordSet.recordsets);
}).catch((err) => {
reject(err);
});
}
catch (error) {
reject(error);
}
});
}
Now I get this error:
Can't rollback transaction. There is a request in progress.
Can anybody please help me solve this problem?
You are creating some unnecessary Promise wrappers.
Example below:
const insertOperation = async (request, data1) => {
request.input("data1", sql.NVarChar(40), data1);
const result = await request.execute("dbo.insertOperation");
return result.recordsets;
};
const updateOperation = async (request, data2) => {
request.input("data2", sql.NVarChar(40), data2);
const result = await request.execute("dbo.updateOperation");
return result.recordsets;
};
const executeProcedure = async (data1, data2) => {
// sql connection
const dbConn = new sql.ConnectionPool(config);
await dbConn.connect();
let transaction;
try {
transaction = new sql.Transaction(dbConn);
await transaction.begin();
const request = new sql.Request(transaction);
const results = await Promise.all([
insertOperation(request, data1),
updateOperation(request, data2),
]);
await transaction.commit();
return results;
} catch (err) {
await transaction.rollback();
throw err;
} finally {
await dbConn.close();
}
};
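A short usage sketch of the rewritten executeProcedure (the argument values are placeholders):
executeProcedure('some insert value', 'some update value')
  .then((results) => console.log('transaction committed', results))
  .catch((err) => console.error('transaction rolled back', err));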
#ikhvjs please check the below use case as well
const insertOperation = async (request, data1) => {
try {
request.input("data", sql.NVarChar(40), data1);
const result = await request.execute("dbo.insertOperation");
return result.recordsets;
} catch (err) {
throw err;
}
};
const updateOperation = async (request, data2) => {
try {
request.input("data", sql.NVarChar(40), data2);
const result = await request.execute("dbo.updateOperation");
return result.recordsets;
} catch (err) {
throw err;
}
};
const executeProcedure = async (data1, data2) => {
try {
// sql connection
const dbConn = new sql.ConnectionPool(config);
await dbConn.connect();
const transaction = new sql.Transaction(dbConn);
try {
await transaction.begin();
const request = new sql.Request(transaction);
const results = await Promise.all([
insertOperation(request, data1),
updateOperation(request, data2),
]);
await transaction.commit();
} catch (err) {
await transaction.rollback();
throw err;
} finally {
await dbConn.close();
}
} catch (error) {
throw error;
}
};
I'm trying to read files from my disk and push them into MongoDB collections, but the connection closes before it's done and I get the error: MongoError: Topology is closed, please connect.
async function launch() {
try {
await mongo.connect();
console.log("Connection established");
const database = mongo.db('task');
const firstCol = database.collection('first');
const secondCol = database.collection('second');
const insertIntoCollection = async (file, col) => {
fs.readFile(file, async function(err, data) {
if (err) throw err;
const json = JSON.parse(data);
const result = await col.insertMany(json);
console.log(result.insertCount);
});
}
await insertIntoCollection('data/first.json', firstCol);
await insertIntoCollection('data/second.json', secondCol);
} finally {
await mongo.close();
}
}
launch().catch(console.dir);
What am I doing wrong?
In the above case the mongo client will close before insertIntoCollection finishes its work: fs.readFile takes a callback that is not awaited, so the promise returned by insertIntoCollection resolves before the read and insert are complete, and the finally block closes the connection too early. I hope the code below fulfils your expectations.
async function launch() {
try {
await mongo.connect();
console.log("Connection established");
const database = mongo.db('task');
const firstCol = database.collection('first');
const secondCol = database.collection('second');
const insertIntoCollection = async (file, col) => {
return new Promise((resolve, reject) => {
fs.readFile(file, async (err, data) => {
try {
if (err) reject(err);
const json = JSON.parse(data);
const result = await col.insertMany(json);
console.log(result.insertedCount);
resolve(result.insertedCount);
} catch (err) {
reject(err)
}
});
})
}
await insertIntoCollection('data/first.json', firstCol);
await insertIntoCollection('data/second.json', secondCol);
} finally {
await mongo.close();
}
}
launch().catch(console.dir);
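An alternative sketch that avoids the manual Promise wrapper entirely by reading the file through fs.promises (available since Node 10); note that the driver reports the count as insertedCount:
const fsPromises = require('fs').promises;
const insertIntoCollection = async (file, col) => {
  // readFile returns a promise here, so the caller's await really waits
  const data = await fsPromises.readFile(file, 'utf8');
  const json = JSON.parse(data);
  const result = await col.insertMany(json);
  console.log(result.insertedCount);
  return result.insertedCount;
};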
I am experimenting with async/await code to read files.
Here's my code:
var fs = require('fs');
function readFile(fileName) {
return new Promise(resolve => {
//console.log(test);
fs.readFile(fileName, 'utf8', function (err, data) {
if (err) throw err;
console.log(fileName)
console.log(data)
})
resolve();
});
}
async function run() {
await readFile('file1.txt');
await readFile('file2.txt');
readFile('file3.txt');
}
run();
But the result is still random: sometimes file3 is read before file2. Where am I going wrong?
There are many ways to achieve that.
Most of them are explained in this link.
I'll write a simple one:
1) Using util.promisify to convert the callback method to a promise:
const fs = require('fs');
const util = require('util');
const readFile = (fileName) => util.promisify(fs.readFile)(fileName, 'utf8');
(async () => {
try {
const files = ['file1.txt', 'file2.txt', 'file3.txt'];
for (const file of files) {
console.log(
await readFile(file)
);
}
}
catch (error) {
console.error(error);
}
})();
2) *Sync methods. Since your code is not dealing with concurrency, you can use the *Sync methods:
const fs = require('fs');
try {
const files = ['file1.txt', 'file2.txt', 'file3.txt'];
for (const file of files) {
console.log(
fs.readFileSync(file, 'utf8')
);
}
}
catch (error) {
console.error(error);
}
BTW, here is your fixed code:
var fs = require('fs');
function readFile(fileName) {
return new Promise((resolve, reject) => {
fs.readFile(fileName, 'utf8', function (error, data) {
if (error) return reject(error);
console.log(fileName)
console.log(data)
resolve();
})
});
}
async function run() {
await readFile('file1.txt');
await readFile('file2.txt');
await readFile('file3.txt');
}
run();
Since you were calling readFile and resolve in the same synchronous sequence, resolve ran immediately instead of waiting for the read to finish, which is the reason for the race condition.
You have to wait for the callback and then resolve inside the callback scope.
There are a couple of options with native Node functionality.
A) With the fs.promises API
You can use destructuring assignment on import to alias fs.promises as just fs:
const { promises: fs } = require("fs");
(async () => {
try {
let file1 = await fs.readFile("file1.txt", "utf-8");
let file2 = await fs.readFile("file2.txt", "utf-8");
} catch (e) {
console.log("e", e);
}
})()
B) With the util.promisify API
const fsSync = require("fs");
const {promisify} = require("util")
const fs = {
readdir: promisify(fsSync.readdir),
readFile: promisify(fsSync.readFile),
// etc
};
(async () => {
try {
let file1 = await fs.readFile("file1.txt", "utf-8");
let file2 = await fs.readFile("file2.txt", "utf-8");
} catch (e) {
console.log("e", e);
}
})()
Further Reading
How to read file with async/await properly?
Using filesystem in node.js with async / await
I have this code that serves every markdown file in the './markdown' folder at '/api/markdown/filename'.
var apiRouter = express.Router();
markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
fs.readdir(markdownFolder, function(err, markdown) {
if (err) throw err;
markdown.forEach(function(file) {
fs.readFile(markdownFolder + file, 'utf8', function(err, file_content) {
if (err) throw err;
fileNoExtension = file.slice(0, file.indexOf('.'));
if (req.params.markdown_file_noext == fileNoExtension) {
res.json({
'title': fileNoExtension,
'markdown': marked(file_content)
});
};
});
});
});
});
But I end up having a ton of callbacks due to the nature of the 'fs' methods. How do I avoid this?
Using Q as promise library:
const Q = require('q');
const fs = require('fs');
const markdownFolder = './markdown/';
const readdir = Q.nfbind(fs.readdir);
const readFile = Q.nfbind(fs.readFile);
readdir(markdownFolder).then(markdown => {
const promises = [];
markdown.forEach(file => promises.push(readFile(markdownFolder + file, 'utf8')));
return Q.all(promises);
}).then(files => {
// Do your magic.
}).catch(error => {
// Do something with error.
});
You have different options.
Use named functions instead of anonymous functions. It would make the code a little more readable, but you will still be using callbacks.
Use Promises, but you will need to use bluebird to wrap the fs module (see the short sketch after this list).
For a more advanced option, you can use generators and Promises to make your code read more like synchronous code. Take a look at co or bluebird.coroutine.
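For the bluebird option, a minimal sketch (assuming bluebird is installed; promisifyAll adds *Async variants of each fs method):
const Promise = require('bluebird');
const fs = Promise.promisifyAll(require('fs'));
fs.readdirAsync(markdownFolder)
  .then((files) => Promise.all(
    files.map((file) => fs.readFileAsync(markdownFolder + file, 'utf8'))
  ))
  .then((contents) => {
    // Work with the file contents here
  })
  .catch((err) => console.log(err));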
With plain Promises you could do it like this:
const path = require('path');
var apiRouter = express.Router();
markdownFolder = './markdown/';
apiRouter.get('/:markdown_file_noext', function(req, res) {
readdir(markdownFolder)
.then((files) => {
const tasks = files.map((file) => {
const filePath = path.resolve(markdownFolder, file);
// Keep the file name together with its contents for the next step
return readFile(filePath).then((content) => ({ file, content }));
});
return Promise.all(tasks); // Read all files
})
.then((fileContents) => {
return fileContents.map(({ file, content }) => {
const fileNoExtension = file.slice(0, file.indexOf('.'));
if (req.params.markdown_file_noext == fileNoExtension) {
return {
'title': fileNoExtension,
'markdown': marked(content)
};
}
})
})
.then((results) => {
// It's better if you aggregate all results in one array and return it,
// instead of calling res.json for each result
res.json(results);
})
.catch((err) => {
// All errors are catched here
console.log(err);
})
});
function readdir(folderPath) {
return new Promise((resolve, reject) => {
fs.readdir(folderPath, (err, files) => {
if (err) {
return reject(err);
}
resolve(files);
});
});
}
function readFile(filePath) {
return new Promise((resolve, reject) => {
fs.readFile(filePath, 'utf8', (err, file_content) => {
if (err) {
return reject(err);
}
resolve(file_content);
});
});
}
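On newer Node versions the two wrapper functions above are not needed, because fs.promises already returns promises; a minimal sketch of equivalent helpers:
const fsp = require('fs').promises;
const readdir = (folderPath) => fsp.readdir(folderPath);
const readFile = (filePath) => fsp.readFile(filePath, 'utf8');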