Getting output of mysql queries synchronously in node.js

I have the following code:
app.get('/showtable1', (req, res) => {
    console.log("yo ho");
    mysqlconnection.query('SELECT d_name FROM datacenters', (err, rows, fields) => {
        if (!err) {
            var array = [];
            var tier = ['Class 0', 'Class 1', 'Class 2', 'Class 3'];
            var str = '';
            Object.keys(rows).forEach(function(key) {
                var row = rows[key];
                array.push(row.d_name);
            });
            for (var i = 0; i < array.length; i++) {
                str += '<tr><td>' + array[i] + '</td>';
                console.log('sfsf');
                for (var j = 0; j < tier.length; j++) {
                    let query1 = "SELECT * FROM `datacenters` WHERE d_name = '" + array[i] + "' AND d_tiers LIKE '%" + tier[j] + "%'"
                    mysqlconnection.query(query1, (err, rows) => {
                        if (!err) {
                            if (rows.length != 0) {
                                console.log(1);
                            } else {
                                console.log(0);
                            }
                        }
                    });
                }
            }
            res.send(rows);
        } else {
            console.log(err);
        }
    })
});
The output of this code is:
yo ho
sfsf
sfsf
sfsf
1
1
0
0
1
1
1
0
1
1
1
0
But I need the output as:
yo ho
sfsf
1
1
0
0
sfsf
1
1
1
0
sfsf
1
1
1
0
The first output occurs because the code runs asynchronously, but I need it to run synchronously. How can I do this?

Asynchronous tasks do not complete in a guaranteed order. You can use Promises and async/await to get ordered output.
You can promisify your queries like this:
function getNames() {
    return new Promise((resolve, reject) => {
        mysqlconnection.query('SELECT d_name FROM datacenters', (err, rows, fields) => {
            if (err) {
                reject(err);
            } else {
                resolve(rows);
            }
        });
    });
}

function getDatacenters(firstVal, tier) {
    const query1 = "SELECT * FROM `datacenters` WHERE d_name = '" + firstVal + "' AND d_tiers LIKE '%" + tier + "%'";
    return new Promise((resolve, reject) => {
        mysqlconnection.query(query1, (err, rows) => {
            if (err) {
                reject(err);
            } else {
                resolve(rows);
            }
        });
    });
}
Then in your route
app.get('/showtable1', async (req, res) => {
    console.log("yo ho");
    const tier = ['Class 0', 'Class 1', 'Class 2', 'Class 3'];
    const namesArray = [];
    const names = await getNames();
    Object.keys(names).forEach(function(key) {
        var row = names[key];
        namesArray.push(row.d_name);
    });
    const namesVal = [];
    for (let i = 0; i < namesArray.length; i++) {
        for (let j = 0; j < tier.length; j++) {
            namesVal.push(getDatacenters(namesArray[i], tier[j]));
        }
    }
    // ordered array of results
    const result = await Promise.all(namesVal)
    ...
    // rest of the code
})
Note: I don't know which library you're using. Some libraries provide Promise support out of the box, so you don't have to write your own wrappers.
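For example, if you happen to be using mysql2, it ships with a promise wrapper, so the route can be written without hand-rolled wrappers. A minimal sketch, assuming a mysql2 connection pool and the same table and column names as in the question:

// Sketch only: assumes the mysql2 package and the datacenters table from the question.
const mysql = require('mysql2/promise');
const pool = mysql.createPool({ host: 'localhost', user: 'root', database: 'mydb' });

app.get('/showtable1', async (req, res) => {
    try {
        const tier = ['Class 0', 'Class 1', 'Class 2', 'Class 3'];
        const [nameRows] = await pool.query('SELECT d_name FROM datacenters');

        const checks = [];
        for (const { d_name } of nameRows) {
            for (const t of tier) {
                // Parameterized query: the driver escapes the values, no string concatenation needed.
                checks.push(
                    pool.query('SELECT * FROM datacenters WHERE d_name = ? AND d_tiers LIKE ?', [d_name, `%${t}%`])
                );
            }
        }

        // Promise.all preserves the order of its input array, so the results line up with the loops above.
        const results = await Promise.all(checks);
        results.forEach(([rows]) => console.log(rows.length !== 0 ? 1 : 0));

        res.send(nameRows);
    } catch (err) {
        console.error(err);
        res.sendStatus(500);
    }
});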

Related

Node.js split file lines

I want to write a script that splits the lines read from a file into batches of 25, but unfortunately one batch ends up with 40 codes. I would like it to split them into batches of 25 items: for example, if I have 60 codes, that should create two batches of 25 and one with 10 codes. Unfortunately, I can't get it to work.
const fs = require('fs');

fs.readFile('code.txt', function (err, data) {
    if (err) throw err;
    const array = data.toString().split("\n");
    let count = 0;
    let items = [];
    for (let i in array) {
        items.push({
            PutRequest: {
                Item: {
                    code: array[i]
                }
            }
        });
        let params = {
            RequestItems: {
                'TABLE_NAME': items
            }
        };
        if (count === 25) {
            dynamoDB.batchWrite(params, function (err, data) {
                if (err) {
                    console.log(err);
                } else {
                    count = 0;
                    items = [];
                }
            });
        } else {
            count++;
        }
    }
});
code.txt content
https://0bin.net/paste/NA8-4hkq#1Ohwt5uUkQqE0YscwnxTX2gxEqlvAUVKp1JRipBCsZg
Any idea what I'm doing wrong?
Your dynamoDB.batchWrite() is asynchronous, so its callback is executed only after the loop has completed. That means items and count are never reset ...
The easiest fix would be to switch to a promise-based approach like the following:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
    const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
    while (lines.length > 0) {
        const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
        const params = { RequestItems: { TABLE_NAME: items } };
        await new Promise((resolve, reject) => {
            dynamoDb.batchWrite(params, (err) => {
                if (err) return reject(err);
                resolve();
            });
        });
    }
}
A callback-based approach could look like this:
const BATCHSIZE = 25;

fs.readFile("code.txt", "utf-8", (err, data) => {
    const lines = data.split("\n");
    function writeBatch() {
        if (!lines.length) return;
        const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
        const params = { RequestItems: { TABLE_NAME: items } };
        dynamoDb.batchWrite(params, err => {
            if (err) ...
            else writeBatch();
        });
    }
    writeBatch();
});
The function writeBatch takes a certain number of lines from your original array and writes them into the database. Only after the write to the DB has succeeded does it recursively call itself to handle the next batch. But be aware that this approach may exceed the maximum call stack size and throw an error.
You can also make either of these approaches leave the lines array untouched (splicing it may be quite expensive) and just take out the current slice:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
    const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
    let currentIndex = 0;
    while (currentIndex < lines.length) {
        const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
        const params = { RequestItems: { TABLE_NAME: items } };
        await new Promise((resolve, reject) => {
            dynamoDb.batchWrite(params, (err) => {
                if (err) return reject(err);
                resolve();
            });
        });
        currentIndex += BATCHSIZE;
    }
}
and
const BATCHSIZE = 25;

fs.readFile("code.txt", "utf-8", (err, data) => {
    const lines = data.split("\n");
    function writeBatch(currentIndex) {
        if (currentIndex >= lines.length) return;
        const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
        const params = { RequestItems: { TABLE_NAME: items } };
        dynamoDb.batchWrite(params, err => {
            if (err) ...
            else writeBatch(currentIndex + BATCHSIZE);
        });
    }
    writeBatch(0);
});
To avoid running into a maximum call stack exception you can also schedule the next batch on the event loop instead of calling it recursively, i.e.:
dynamoDb.batchWrite(params, err => {
    if (err) ...
    else setTimeout(() => { writeBatch(currentIndex + BATCHSIZE); }, 0);
});
This way you won't build up a massive callstack from recursive calls.
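In Node you could also use setImmediate for the same effect, which reads a bit more clearly than a zero-delay timer (just an alternative sketch, mirroring the snippet above):

dynamoDb.batchWrite(params, err => {
    if (err) ...
    else setImmediate(() => writeBatch(currentIndex + BATCHSIZE));   // defers the next batch to the event loop
});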
To keep track of how many records are already saved to the DB, you could simply store the current counter in a file. When you restart the process, load that file and check how many lines to skip. Don't forget to remove the file once all records have been saved ... For example, with the first approach:
const BATCHSIZE = 25;
const fs = require('fs').promises;

async function batchLoad() {
    const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
    let skipLines = 0;
    try {
        skipLines = +(await fs.readFile("skip.txt", "utf-8"));
        if (isNaN(skipLines)) skipLines = 0;
        lines.splice(0, skipLines);
    } catch (e) {
        skipLines = 0;
    }
    while (lines.length > 0) {
        const items = lines.splice(0, BATCHSIZE).map(l => ({ PutRequest: { Item: { code: l } } }));
        const params = { RequestItems: { TABLE_NAME: items } };
        await new Promise((resolve, reject) => {
            dynamoDb.batchWrite(params, (err) => {
                if (err) return reject(err);
                resolve();
            });
        });
        skipLines += BATCHSIZE;
        await fs.writeFile("skip.txt", `${skipLines}`);
    }
    try {
        await fs.unlink("skip.txt");
    } catch (e) {
    }
}

Why does a promise calling my function fail in my NodeJS+Express REST API, while the same promise with setTimeout works?

I have a NodeJS+Express REST API method executing reverse geocoding (using Google's Maps API).
I'm trying to solve it with Promises but the 'then' is getting executed before my function returns with the answers from Google.
When testing the same code just calling a setTimeout, it works as expected. Please see the comments in the code (simplified version).
app.get('/api/v1/events', verifyToken, async (req, res) => {
    await db.poolPromise.then(pool => {
        return pool.request()
            .input('UserId', db.sql.UniqueIdentifier, res.authData.userId)
            .input('DateFrom', db.sql.DateTime2(7), req.query.dateFrom)
            .input('DateTill', db.sql.DateTime2(7), req.query.dateTo)
            .output('UserIdAuthorized', db.sql.Bit)
            .execute('sp')
    }).then(result => {
        let output = (result.output || {})
        if (!output.UserIdAuthorized) {
            res.sendStatus(403)
        }
        else if (result.recordset.length > 0) {
            (new Promise((resolve) => {
                //resolve(123) // this one works as expected
                //setTimeout(resolve, 3000, 'temp success') // this one works as expected
                // *** this one get passed and the following then is being executed before it answers ***
                resolve(getAddress_TEST(result.recordset))
                // **************************************************************************************
            })).then(function (value) {
                res.json({
                    meta: { count: 10 }, //this is just a sample
                    result: value // *** this one fails with undefined ***
                })
            })
        } else {
            res.sendStatus(404)
        }
    }).catch(err => {
        res.sendStatus(500)
        console.error(err)
    })
});
const nodeGeocoder_options = {
    provider: 'google',
    apiKey: process.env.GOOGLE_API_KEY
}
async function getAddress_TEST(recordset) {
    //sample recordset for debugging - as you dont have my database
    recordset = [{'eventId':14205556,'Lat':54.57767,'Lon':-2.4920483},{'eventId':14205558,'Lat':54.57767,'Lon':-2.492048},{'eventId':14205579,'Lat':53.416908,'Lon':-2.952071},{'eventId':14205588,'Lat':52.644448,'Lon':-1.153185},{'eventId':14205601,'Lat':52.29174,'Lon':-1.532283},{'eventId':14205645,'Lat':52.644448,'Lon':-1.153185},{'eventId':14205801,'Lat':53.68687,'Lon':-1.498708},{'eventId':14206041,'Lat':51.471521,'Lon':-0.2038033},{'eventId':14206049,'Lat':51.471521,'Lon':-0.2038033},{'eventId':14206072,'Lat':51.471521,'Lon':-0.2038033}]
    let geocoder = nodeGeocoder(nodeGeocoder_options)
    let ps = []
    for (var i = 0, length = recordset.length; i < length; i++) {
        if (i == 0 || !(i > 0
            && recordset[i - 1].Lat == recordset[i].Lat
            && recordset[i - 1].Lon == recordset[i].Lon)) {
            ps.push(new Promise(function (resolve) {
                resolve(reverseGeocode(geocoder, recordset[i].Lat, recordset[i].Lon))
            }))
        } else {
            ps.push('-')
        }
    }
    await Promise.all(ps)
        .then(function (values) {
            for (var i = 0, length = values.length; i < length; i++) {
                if (values[i] != '-') {
                    recordset[i].locationAddress = values[i]
                } else {
                    recordset[i].locationAddress = recordset[i - 1].locationAddress
                }
            }
        }).then(function () {
            recordset.forEach(function (v) {
                delete v.Lat
                delete v.Lon
            });
            console.log(recordset)
            return recordset
        })
};
async function reverseGeocode(geocoder, lat, lon) {
    let address = '+'
    if (lat != 0 && lon != 0) {
        await geocoder.reverse({ lat: lat, lon: lon })
            .then(res => {
                address = res[0].formattedAddress
            })
            .catch(err => {
                console.error(err)
            });
    }
    return address
};
I'm sure it is something simple that I'm missing here...
The basic problem is that your getAddress_TEST function returns a promise that fulfills with nothing (undefined), because it does not contain a return statement. The return recordset sits inside a then() callback, so it only affects the resolution of the awaited promise, and that result is thrown away.
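To see the difference in isolation, here is a minimal sketch (somePromise is just a stand-in for any promise, not code from your project):

// Sketch of the problem, not the original code:
async function broken() {
    await somePromise.then(() => {
        return 'value';   // resolves only the inner then() chain; nothing consumes it
    });
    // no return statement here, so broken() fulfills with undefined
}

async function fixed() {
    const value = await somePromise;
    return value;         // the async function itself now fulfills with 'value'
}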
If you want to use async/await, you should get rid of any new Promise and then calls:
app.get('/api/v1/events', verifyToken, async (req, res) => {
    try {
        const pool = await db.poolPromise
        const result = await pool.request()
            .input('UserId', db.sql.UniqueIdentifier, res.authData.userId)
            .input('DateFrom', db.sql.DateTime2(7), req.query.dateFrom)
            .input('DateTill', db.sql.DateTime2(7), req.query.dateTo)
            .output('UserIdAuthorized', db.sql.Bit)
            .execute('sp')
        let output = (result.output || {})
        if (!output.UserIdAuthorized) {
            res.sendStatus(403)
        } else if (result.recordset.length > 0) {
            const value = await getAddress_TEST(result.recordset)
            res.json({
                meta: { count: 10 }, //this is just a sample
                result: value
            })
        } else {
            res.sendStatus(404)
        }
    } catch (err) {
        res.sendStatus(500)
        console.error(err)
    }
});
const nodeGeocoder_options = {
    provider: 'google',
    apiKey: process.env.GOOGLE_API_KEY
}
async function getAddress_TEST(recordset) {
    const geocoder = nodeGeocoder(nodeGeocoder_options)
    const ps = recordset.map((record, i) => {
        if (i == 0 || !(i > 0
            && recordset[i - 1].Lat == record.Lat
            && recordset[i - 1].Lon == record.Lon)) {
            return reverseGeocode(geocoder, record.Lat, record.Lon)
        } else {
            return '-'
        }
    });
    const values = await Promise.all(ps)
    //             ^^^^^^^^^^^^^^^^^^^^^
    for (var i = 0, length = values.length; i < length; i++) {
        if (values[i] != '-') {
            recordset[i].locationAddress = values[i]
        } else {
            recordset[i].locationAddress = recordset[i - 1].locationAddress
        }
    }
    recordset.forEach(function (v) {
        delete v.Lat
        delete v.Lon
    });
    console.log(recordset)
    return recordset
    // ^^^^^^^^^^^^^^^^
}

async function reverseGeocode(geocoder, lat, lon) {
    if (lat != 0 && lon != 0) {
        const res = await geocoder.reverse({ lat: lat, lon: lon })
        return res[0].formattedAddress
    }
    return '+'
}

How to get code to execute in order in node.js

I am trying to finish my script, but for some reason I don't know, it refuses to execute in the order I put it in.
I've tried placing a 'wait' function between the join-request update function and the following code, but when run, it acts as if the function call and the wait function were the other way round, defeating the point of the wait().
const Roblox = require('noblox.js')
var fs = require('fs');
var joinRequests = []
...
function wait(ms) {
    var d = new Date();
    var d2 = null;
    do { d2 = new Date(); }
    while (d2 - d < ms * 1000);
};
...
function updateJReqs() {
    Roblox.getJoinRequests(4745601).then((array) => {
        var i;
        var final = [];
        for (i = 0; i < array.length; i++) {
            final.push(array[i].username);
        };
        if (final === '') {
            final = '-None';
        };
        joinRequests = final
        console.log('Updated join requests.')
    });
}

function check() {
    setTimeout(() => {
        fs.readFile('Request.txt', encoding = 'utf-8', function(err, data) {
            if (err) {
                check();
            } else {
                updateJReqs(); //for some reason this function is executed alongside the below, not before it.
                // Tried putting wait(x) in here.
                console.log('Request received: ' + data)
                var solution = joinRequests
                console.log('Fuffiling request with ' + solution)
                fufillRequest(solution)
                fs.unlink('Request.txt', function(err) {
                    if (err) throw err;
                });
                check();
            }
        });
    }, 400)
}
check();
The script is supposed to wait until a file is created (accomplished), update the list of join requests (accomplished), and then create a new file with the list of join requests in it (not accomplished).
If I understand your code correctly, you are working with async code: you need to return a Promise from updateJReqs and add an exit condition to check(), because otherwise you have infinite recursion.
function updateJReqs() {
    return new Promise(resolve => {
        Roblox.getJoinRequests(4745601).then((array) => {
            var i;
            var final = [];
            for (i = 0; i < array.length; i++) {
                final.push(array[i].username);
            };
            if (final === '') {
                final = '-None';
            };
            joinRequests = final
            console.log('Updated join requests.')
            resolve();
        });
    });
}
async function check() {
    setTimeout(() => {
        fs.readFile('Request.txt', encoding = 'utf-8', async function(err, data) {
            if (err) {
                await check();
            } else {
                await updateJReqs();
                // Tried putting wait(x) in here.
                console.log('Request received: ' + data)
                var solution = joinRequests
                console.log('Fuffiling request with ' + solution)
                fufillRequest(solution)
                fs.unlink('Request.txt', function(err) {
                    if (err) throw err;
                });
                // you don't have an exit from your function check();
                return 'Success';
            }
        });
    }, 400)
}
check().then(res => console.log(res));
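Note that even with these changes, the value returned inside the readFile callback never reaches the promise returned by check(), so the final console.log will still print undefined. A cleaner alternative is to use fs.promises and await every step explicitly. This is only a sketch: it assumes joinRequests, updateJReqs (the promise-returning version above) and fufillRequest behave as in the original script.

const fs = require('fs').promises;

// Small helper to pause between polls.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));

async function check() {
    while (true) {
        let data;
        try {
            data = await fs.readFile('Request.txt', 'utf-8');
        } catch (err) {
            await sleep(400);          // file not there yet, try again shortly
            continue;
        }
        await updateJReqs();           // finishes before the lines below run
        console.log('Request received: ' + data);
        const solution = joinRequests;
        console.log('Fuffiling request with ' + solution);
        fufillRequest(solution);
        await fs.unlink('Request.txt');
        return 'Success';
    }
}

check().then(res => console.log(res));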

Is there a way to avoid setting a timeout?

I'm trying to collect all the values from a MySQL table, along with all the values of the referenced_table_name for each index of the table.
How can I avoid setting a random timeout while waiting for a promise?
To collect the expected information I need to set a random timeout, otherwise my object is undefined...
module.exports = {
    getTable: async (req, res) => {
        const tablename = req.params.table,
            dbName = req.params.dbName;
        let jsonResult = {};
        getTableValues(dbName, tablename)
            .then(tableValues => {
                getTableIndexedCol(dbName, tablename)
                    .then(indexedColumns => {
                        let indexedArr = {};
                        for (let index = 0; index < indexedColumns.length; index++) {
                            const element = indexedColumns[index],
                                column = element.column_name,
                                referencedTable = element.referenced_table_name;
                            let allValuesRefTable = new Array();
                            getTableValues(dbName, referencedTable)
                                .then(referencedTableValues => {
                                    for (let i = 0; i < referencedTableValues.length; i++) {
                                        const el = referencedTableValues[i];
                                        allValuesRefTable.push(el.name);
                                    }
                                })
                                .catch(err => console.log(err));
                            /* IF NO TIMEOUT, DOESN'T WORK */
                            setTimeout(function() {
                                indexedArr[column] = allValuesRefTable;
                            }, 100);
                        }
                        setTimeout(function() {
                            jsonResult = {
                                name: tablename,
                                rows: tableValues,
                                rowIndexed: indexedArr
                            }
                            res.json(jsonResult);
                        }, 5000);
                    })
                    .catch(err => console.log(err));
            })
            .catch(err => console.log(err));
    }
};
Is there a way to avoid using setTimeout? How can I 'wait' until the promise is resolved?
Here is my getTableIndexedCol function, for example:
async function getTableIndexedCol(dbName, tablename) {
    const sqlRefTable = SELECT...;
    return new Promise(async function (resolve, reject) {
        try {
            [refTable, refTableFields] = await promisePool.query(sqlRefTable)
        }
        catch (err) {
            reject(err)
        }
        setTimeout(function () {
            resolve(refTable);
        }, 500);
    })
}
If you are already using async/await you can use it all the way and avoid the "Promise Hell" (nested .then calls):
module.exports = {
    getTable: async (req, res) => {
        try {
            const tablename = req.params.table,
                dbName = req.params.dbName;
            const tableValues = await getTableValues(dbName, tablename);
            const indexedColumns = await getTableIndexedCol(dbName, tablename);
            let indexedArr = {};
            for (let index = 0; index < indexedColumns.length; index++) {
                const element = indexedColumns[index],
                    column = element.column_name,
                    referencedTable = element.referenced_table_name;
                let allValuesRefTable = new Array();
                const referencedTableValues = await getTableValues(dbName, referencedTable);
                for (let i = 0; i < referencedTableValues.length; i++) {
                    const el = referencedTableValues[i];
                    allValuesRefTable.push(el.name);
                }
                indexedArr[column] = allValuesRefTable;
            }
            const jsonResult = {
                name: tablename,
                rows: tableValues,
                rowIndexed: indexedArr
            }
            res.json(jsonResult);
        } catch (err) {
            console.log(err);
        }
    }
};
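If the reference-table lookups are independent of one another, they could also be fired off together and awaited with Promise.all instead of one at a time. A sketch building on the loop above, assuming getTableValues and the column names behave as in the question:

// Sketch: start all reference-table queries at once and wait for them together.
const indexedArr = {};
const lookups = indexedColumns.map(async element => {
    const referencedTableValues = await getTableValues(dbName, element.referenced_table_name);
    indexedArr[element.column_name] = referencedTableValues.map(el => el.name);
});
await Promise.all(lookups);

Whether this is actually faster depends on the connection pool: a single connection will still serialize the queries under the hood.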

How to send result after executing all queries

I've got the following code:
var data = [];
var i = 0,
    j = 9;
async.whilst(function () {
    return i <= j;
}, function (next) {
    connection.query('select * from table', function (err, rows, field) {
        data.push(rows.length);
        console.log(data);
    });
    i++;
    next();
}, function (err) {
    console.log(data);
});
I want each query to finish executing first and only then return the result. How should I do it? I've also tried it with a simple for loop, but that does not work either.
I think you just need to call next() after getting the result of the query:
var data = [];
var i = 0;
var j = 9;
async.whilst(
    function () { return i <= j; },
    function (next) {
        i++;
        connection.query('select * from table', function(err, rows, field) {
            data.push(rows.length);
            console.log(data);
            next();
        });
    },
    function (err) {
        console.log(data);
    }
);
Here is another simple solution with async/await if you use babeljs:
(async function() {
    let data = [];
    let i = 0;
    const j = 9;
    while (i <= j) {
        let length = await new Promise((resolve, reject) => {
            connection.query('select * from table', function(err, rows, field) {
                if (err) return reject(err);
                resolve(rows.length);
            });
        });
        data.push(length);
        i++;
    }
    return data;
})()
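Since the async IIFE returns a promise, the surrounding code still has to wait for it before using data. The same idea also works without an IIFE and without Babel on any Node version with native async/await; this is just a sketch reusing the connection object from the question, where queryAsync and collectRowCounts are illustrative names:

// Hypothetical helper: promisify a single query so it can be awaited.
function queryAsync(sql) {
    return new Promise((resolve, reject) => {
        connection.query(sql, (err, rows) => err ? reject(err) : resolve(rows));
    });
}

async function collectRowCounts() {
    const data = [];
    for (let i = 0; i <= 9; i++) {
        const rows = await queryAsync('select * from table'); // each query finishes before the next starts
        data.push(rows.length);
    }
    return data;
}

collectRowCounts()
    .then(data => console.log('all queries finished:', data))
    .catch(err => console.error(err));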
