nodejs getting db delayed result only works with console.log - node.js

I'm aware of the event-driven/non-blocking stuff of node and I've been using it for about 2 years...
recently I came across this problem and I can't solve it, even by forcing a closure...
I'm asking database for a simple result: 'SELECT 1+1 as theResult'
(...) // previous code
var dbRows1 = {}; // empty object to hold rows
var dbRows2 = {}; // idem
var mysql = require( 'mysql' );
var db = mysql.createConnection( dbInfo ); // dbInfo has connction data
var test = function( a ) { dbRows2 = a };
db.connect(
function( err )
{
if (err) throw err.stack;
console.log( 'TID ->', db.threadId)
});
db.query (
'SELECT 1+1 AS XXX',
function( err, rows, fields )
{
console.log( 'rows ->', rows ); // works ok
dbRows1 = rows; // don't work because rows is still empty
test( rows ); // !!!should work but dbRows2 is empty at program end!!!
});
(...) // more code
console.log( dbRows2 );
the last line prints an empty object for dbRows2 ( { } )...
Any ideas why console.log() works but my test() function does not?

Your test function is working, the problem is that, at the time your last console.log statement executes, the test function hasn't been executed yet. This is because both .connect and .query are asynchronous.
The program flow looks something like this:
connect is called and queued
query is called and queued
console.log(dbRows2) is called
(time passes...)
connect succeeds and calls back
(time passes...)
query succeeds and calls back
test is called assigning rows to dbRows2
You can verify this behavior yourself by placing some strategic logging in your code before each action and inside of the callbacks. If you were to do that you would see something like this in your console:
about to call connect
about to call query
logging dbRows2
connect succeeded
query succeeded
about to call test
Without knowing the exact structure of your code, the following provides a rough example of how you might structure your logic to ensure the code that depends on the query completing is only invoked after the query completes:
db.query (
'SELECT 1+1 AS XXX',
function( err, rows, fields )
{
console.log( 'rows ->', rows ); // works ok
dbRows1 = rows; // don't work because rows is still empty
test( rows ); // !!!should work but dbRows2 is empty at program end!!!
// Call doStuffThatDependsOnQuery here
doStuffThatDependsOnQuery();
});
(...) // more code that doesn't depend on db.query(...)
function doStuffThatDependsOnQuery() {
console.log( dbRows2 );
}

Related

Multple SQL queries in Node with oracledb

I'm new to Node and am having problems reading from Oracle.
I have the basic examples all set up and can issue basic queries, and process the results etc..
The problem I'm having is that I need to;
Execute one query (Q1)
For each item in the results of Q1 I need to execute a second query (Q2)
I need to combine the results of Q1 and Q2s into an array to return as a promise
I am struggling to find an example where I can perform #2 - call the same query multiple times for each item returned from Q1, using the same connection which was used for Q1.
My code is below - I first perform a read, then iterate through the results storing connection.execute objects which I then run via the Promise.all line - the result of which I just output as I want to get this working before I code the logic to combine the results of Q1 and Q2.
When I run this via mocha, the results don't contain any data - I see the column headings but no data.
So what am I missing here?
// placeholder for the connection
let conn;
// return case list array
var caseList = [];
var queryList = [];
return new Promise((resolve, reject) => {
// retrieve connection
oracledb.getConnection({
user: dbconfig.user,
password: dbconfig.password,
connectString: dbconfig.connectString
}) // the connection is returned as a promise
.then(connection => {
console.log('Connected to the DB!');
// assign connection
conn = connection;
// execute statement
return connection.execute(
`select caseid, casereference, startdate from caseheader inner join orgobjectlink on caseheader.ownerorgobjectlinkid = orgobjectlink.orgobjectlinkid where orgobjectlink.username = :username`,
[params.username], {
outFormat: oracledb.OBJECT // set the output format to be object
}
);
})
.then(result => {
// iterate around rows
result.rows.forEach(row => {
var caseObj = {
caseID: row.CASEID,
reference: row.CASEREFERENCE,
dateAssigned: moment(row.STARTDATE).format('YYYY-MM-DD'),
username: params.username,
}
caseList.push(caseObj);
console.log(caseObj.caseID)
queryList.push(conn.execute(`select concernroleid, concernrolename from concernrole inner join caseparticipantrole on concernrole.concernroleid = caseparticipantrole.participantroleid where caseparticipantrole.caseid = :caseID and (caseparticipantrole.typecode = 'PRI' or caseparticipantrole.typecode = 'MEM')`,
[caseObj.caseID], {
outFormat: oracledb.OBJECT
}));
});
// build up queries
return Promise.all(queryList).then(results => {
console.log(results);
Promise.resolve(results);
}, err => {
console.log(err);
});
}).then({
if(conn){
console.log("Closing DB connection");
conn.close();
}
}).catch(err => {
console.log('Error', err);
});
});
Promise.all will not work for you as you want to use a single connection and as mentioned previously a connection will only do one thing at a time anyway. To solve this problem using promises, you'd have to build up and unwind a promise chain. I can show you an example, but it's nasty - probably better to just forget I mentioned it.
A better option would be to go into a simple for loop using async/await. I can show you an example of that too but again, I think this is the wrong move. We call this row by row fetching (a.k.a slow by slow).
It's likely the best solution for you will be to take the results from the first query and build up an array. Then execute the second query using one of these options to process the array. https://oracle.github.io/node-oracledb/doc/api.html#sqlwherein
You'll need to include the caseid column in the select clause and perhaps even order by that column so that post-processing of the result set is simplified in Node.js.
This solution has the potential to greatly improve performance and resource utilization, but that has to be balanced against the amount of data you have, the resources, etc. I could probably show you an example of this too, but it will take a bit longer and I'd want to get some more info from you to ensure we're on the right path.
One problem is the Promise.all().then... function doesn't return anything (and doesn't need the additional resolve()). The way to get this sorted is build small, testable, promise returning functions, and test them individually.
Starting simply, write a mocha test to connect to the database...
// Open a connection to the database using the configured credentials.
// Returns a promise that resolves to an oracledb connection object.
function connect() {
  const { user, password, connectString } = dbconfig;
  return oracledb.getConnection({ user, password, connectString });
}
Here's one that can run a command on the db. Test this with a simple query that you know will return some results.
// Run one SQL statement on the given connection; rows come back as
// plain objects keyed by column name. Returns the execute() promise.
function executeCmd(connection, cmd, params) {
  const options = { outFormat: oracledb.OBJECT };
  return connection.execute(cmd, params, options);
}
With just these two (and one more) we can outline a simple function that does the job: connect to the database, run a select, process each result asynchronously, then disconnect.
// Connect, fetch all cases owned by `username`, enrich each case row
// asynchronously via processCaseRow, then close the connection and
// resolve with the array of case objects.
function connectAndQuery(username) {
  let connection;
  return connect().then(result => {
    // keep a handle on the connection so the later steps can reuse and close it
    connection = result;
    let cmd = `select caseid, casereference, startdate from caseheader inner join orgobjectlink on caseheader.ownerorgobjectlinkid = orgobjectlink.orgobjectlinkid where orgobjectlink.username = :username`;
    return executeCmd(connection, cmd, [username]);
  }).then(result => {
    // fan out one promise per row; they all share the single connection
    let promises = result.rows.map(row => processCaseRow(connection, row, username));
    return Promise.all(promises);
  }).then(result => {
    // result should be an array of caseObj's
    // close first, then pass the resolved array through to the caller
    return connection.close().then(() => result);
  });
}
The last thing to build and test is a promise-returning function which processes a row from the main function above.
I had to take some liberty with this, but I think the objective is -- given a row representing a "case" -- build a case object, including a collection of "concernedRoles" that can be queried with the caseID. (that last bit was my idea, but you can build a separate collection if you like)
// return a promise that resolves to an object with the following properties...
// caseID, reference, dateAssigned, username, concernedRoles
// get concernedRoles by querying the db
// Return a promise that resolves to an object with the following properties:
// caseID, reference, dateAssigned, username, concernedRole
// (concernedRole holds the result of a second query keyed on the caseID).
function processCaseRow(connection, row, username) {
  var caseObj = {
    caseID: row.CASEID,
    reference: row.CASEREFERENCE,
    dateAssigned: moment(row.STARTDATE).format('YYYY-MM-DD'),
    username: username
  }
  let cmd = `select concernroleid, concernrolename from concernrole inner join caseparticipantrole on concernrole.concernroleid = caseparticipantrole.participantroleid where caseparticipantrole.caseid = :caseID and (caseparticipantrole.typecode = 'PRI' or caseparticipantrole.typecode = 'MEM')`;
  // Bind parameters must be passed as an array (see the [username] bind
  // above); passing the bare scalar row.CASEID was a bug.
  return executeCmd(connection, cmd, [row.CASEID]).then(result => {
    // NOTE(review): this stores the whole execute() result (rows plus
    // metadata); use result.rows if only the row data is wanted — confirm
    // with whatever consumes caseObj.concernedRole.
    caseObj.concernedRole = result
    return caseObj
  })
}

Mysql inserts with AWS Lambda + Node.js

I'm running nodejs function in Amazon Lambda. It is supposed to do an insert to mysql DB after a HTTP get. Everything seems to be fine -- looking at the cloudwatch logs the query is parsed correctly and if I copy paste the query to mysql console it does exactly what it is supposed to.
Essentially:
var mysql = require('mysql')
var connection = createConnection({ connection details });
connection.connect();
var query = connection.query('Insert into AAA select * \
from BBB where BBB.a = ?;', [parameter],
function(err, result) {}
);
connection.end();
The problems is that the Lambda version simply does nothing. Query is visible and correct and the function returns cleanly but it never actually inserts anything. I have the same problem with update query as well but all the mysql selects work and return stuff so the problem is not that. The insert also works when I run it on my machine -- when I push it to lambda the problem appears.
I tried to add a separate commit statement but couldn't get it working either. I'm clearly missing something but can't figure out what. Do I need to have a transaction block for updates?
EDIT: Per Mark B's request. I think I tried to be smarter than I am by showing only part of the code. The whole logic was:
exports.handler = function(event, context, callback){
if ( event.A == -1 ){
exports.updateDB(event, function(res) {
context.succeed(res)
}
}
};
exports.updateDB = function(event, callback) {
var mysql = require('mysql')
var connection = createConnection({ connection details });
connection.connect();
var query = connection.query( 'update products set A=? where product_id = ?;',
[parameters],
function(err,result){ });
var query = connection.query( 'insert into other_table select * from products where product_id = ?;',
[parameters],
function(err,result){ });
connection.commit(function(err) {
if(err) {
connection.rollback(function() {
throw(err);
});
}
connection.end();
});
callback({"ok":"ok"})
};
Per advice given here I made the following changes. I took the last callback away, and did put callbacks inside both connection.queries:
var query = connection.query( 'insert into other_table select * from products where product_id = ?;',
[parameters],
function(err,result){
callback({"ok":"ok"})
});
And it seems to work. I'm guessing now that the commit -part does nothing but it doesn't seem to break it either. It probably is obvious at this point that I'm not much of a developer and even less so familiar with node.js so I truly appreciate the help I got!
Please note that the query function is an asynchronous function, meaning that it will be no result available until the callback function is triggered. In your sample code, the connection is closed immediately after it was triggered, long before the callback is executed. Try changing the code so that connection is closed by in the callback function, e.g.
var query = connection.query('Insert into AAA select * \
from BBB where BBB.a = ?;', [parameter],
function(err, result) {
// now it is ok to close the connection
connection.end();
if (err) {
// error handling
}
else {
// do something with the result
}
}
);
By the way, since you are working with Lambda, the same thing applies to the callback(), context.succeed() and context.fail() function handlers. In other words, it is likely that you would like to call them where I wrote the comments about error and result handling above.

Promise inside loop in nodejs isn't working as expected

I have array of events, for each value there may/may not query fires to get data.
var eventTypes = [["write","write","write","write"],["write","write","read","write"]];
_.each(eventTypes,function(obj){
gettime(obj);
});
gettime=function(events){
var resultArray = [];
_.each(events,function(event){
if(event === "read"){
resultArray.push(makesqlquery(event));
}else{
resultArray.push({"time":current_time})
}
});
q.all(resultArray).then(function(finalResult){
insertIntoPostgreSQL(finalResult);
});
}
makesqlquery = function(event){
var deferred = q.defer();
sql.query("select time from events where eventtype ="+ event,
function(result,error){
if(error){
deferred.reject(error);
}else{
deferred.resolve({time:result.time});
}
});
return deferred.promise;
}
In the above code I'm able to push 1st set of data(["write","write","write","write"]) into postgresql database but not 2nd set(["write","write","read","write"]). Whenever I get read event in a set, I'm getting empty object. What would be the problem? For the above example I should have 8 records in postgresql, but I see only first array's four data.
More Info: insertIntoPostgreSQL() function will get list of objects and insert each object into database. This operation is workin fine.
I tried to use two console.log statements, as follows:
console.log("sql result:"result);
deferred.resolve({time:result.time});
and
console.log("Before Insert:"JSON.stringigy(resultArray));
q.all(resultArray).then(function(finalResul‌​t){
I get result as in the following order.
Before insert:[{"source":{}},{"source":{}},{"source":{}},{"source":{}}]
Before insert:[{}]
sql result:{time:"2015-07-10 00:00:00"}
As Roamer-1888 mentioned in the comments, definitely add an error handler for your Q.all. The basic structure of your promise seems to be fine, the error is somewhere else.
It looks like the result in your sql.query callback is not quite what you expect, as it is read as undefined. Because of an error there, Q.all is not getting resolved, therefore nothing gets added to your database.

node.js, pg module and done() method

Using the pg module and clients pool I need to call done() method in order to return the client into clients pool.
Once I connect to the server, I add SQL query client’s query queue and I start handling the result asynchronously row by row in row event:
// Execute SQL query
var query = client.query("SELECT * FROM categories");
// Handle every row asynchronously
query.on('row', handleRow );
When I should call done() method?
Should I call it once I receive the end event and all rows are processed or I can call it immediately after I add SQL query to the client’s query queue?
Going from an example on this project's page (https://github.com/brianc/node-pg-query-stream), I'd recommend calling it when you get the end event.
This makes sense, because you're not done with it until you're received the last row. If someone else got that same connection and tried using it, that would likely create odd errors.
The former makes sense: you would want to call it once you know you have processed all rows for your query.
// your DB connection info -- note the credentials are separated from the
// host by an '@' in a pg connection URI: pg://user:password@host:port/db
var conString = "pg://admin:admin@localhost:5432/Example";
var pg = require("pg");
var client = new pg.Client(conString);
client.connect();
// Your own query
var query = client.query("SELECT * FROM mytable");
// 'row' fires once per row as the results stream in
query.on("row", function (row, result) {
  // do your stuff with each row
  result.addRow(row);
});
// 'end' fires after the last row, so the accumulated result is complete here
query.on("end", function (result) {
  // here you have the complete result
  console.log(JSON.stringify(result.rows, null, 2));
  // end when done ;)
  client.end();
});

Node.js and Redis

I am trying to link up a redis database with a Node.js application I am building to be able to store comments about items. I'm using the node_redis library to handle the connection. When I attempt to retrieve the comments out of the database however only "[true]" is returned. For testing purposes I have stuffed everything into one method and I have hardcoded the values in, but I still receive "[true]".
exports.getComment = function (id){
var comments = new Array();
rc.hmset("hosts", "mjr", "1", "another", "23", "home", "1234");
comments.push(rc.hgetall("hosts", function (err, obj) {
var comment = new Array();
if(err){
comment.push("Error");
} else {
comment.push(obj);
}
return comment;
}));
return comments;
}
Updated the code according to the tutorial and here is the result:
Retrieving the comment:
exports.getComment = function (id, callback){
rc.hgetall(id, callback);
}
Adding the comment:
exports.addComment = function (id, area, content, author){
//add comment into the database
rc.hmset("comment",
"id", id,
"area", area,
"content", content,
"author" , author,
function(error, result) {
if (error) res.send('Error: ' + error);
});
//returns nothing
};
Code to render:
var a = [];
require('../lib/annotations').addComment("comment");
require('../lib/annotations').getComment("comment", function(comment){
a.push(comment)
});
res.json(a);
Node.js is asynchronous. Which means it asynchronously does the redis stuff, and then gets the result back in the callback function.
I suggest you read this tutorial and fully understand it before getting further: http://howtonode.org/node-redis-fun
Basically, this way won't work:
// BROKEN example: redis.some() only queues the request and returns
// immediately -- the reply arrives later in a callback.
function getComments( id ) {
var comments = redis.some( action ); // not the data; the async call has not completed yet
return comments; // returns before redis has answered
}
But it has to be this way:
// Correct pattern: hand redis the caller's callback so the results are
// delivered when the reply actually arrives.
function getComments( id, callback ) {
redis.some( action, callback );
}
This way, you use the API like this:
getComments( '1', function( results ) {
// results are available!
} );
The problem lies in how the call to getComment is made, as shown below.
require('../lib/annotations').getComment("comment", function(comment){
a.push(comment)
});
This call is missing an argument in the callback function. The first argument is the error, which will be null if everything is ok; the second is the actual data. So it should be structured like the call below.
// node_redis callbacks follow the error-first convention: (err, data).
// The original snippet declared only (comment) and pushed an undefined
// `err`; the callback must name both parameters and push the data.
require('../lib/annotations').getComment("comment", function(err, comment){
a.push(comment)
});

Resources