I'm new to Node.js and I'm having an issue connecting to an MS SQL database and preparing/executing a prepared statement.
I have the following code:
this.connectionPool.connect().then(pool => {
  // Create prepared statement
  stmt = new mssql.PreparedStatement(pool);
  // PREPARE
  stmt.prepare(command, err => {
    //TODO: check for errors in prepare
    // EXECUTE
    stmt.execute((err, result) => {
      //TODO: check for errors in execute
      // UNPREPARE
      stmt.unprepare(err => {
        //TODO: check for errors in unprepare
      });
      console.log(`Rows affected: ${stmt.rowsAffected}`);
      console.log(`Result: ${result}`);
      return result;
    });
  });
}).catch(err => {
  console.log(`Connection pool error: ${err}`);
}).finally(() => {
  // Close connection
  console.log("Closing connection.");
  if (this.connectionPool.connected) {
    this.connectionPool.close();
  }
});
I'm finding that the finally() callback executes at the same time as then(), which I wouldn't have expected. This causes the connection to be closed before the statement can be prepared:
ConnectionError: connection is closed
How can I ensure the connection is closed only after the statement has been executed?
The callbacks are executed asynchronously, so you need to add the result of the execute call to the promise chain:
this.connectionPool.connect().then(pool => {
  // Create prepared statement
  stmt = new mssql.PreparedStatement(pool)
  // return a promise to add it to the promise chain
  return new Promise((resolve, reject) => {
    stmt.prepare(command, err => {
      // TODO: check for errors in prepare
      if (err) {
        reject(err)
        return
      }
      // EXECUTE
      stmt.execute((err, result) => {
        // TODO: check for errors in execute
        if (err) {
          reject(err)
          return
        }
        // UNPREPARE
        stmt.unprepare(err => {
          // TODO: check for errors in unprepare
          if (err) {
            reject(err)
          }
        })
        console.log(`Rows affected: ${stmt.rowsAffected}`)
        console.log(`Result: ${result}`)
        resolve(result)
      })
    })
  })
}).catch(err => {
  console.log(`Connection pool error: ${err}`)
}).finally(() => {
  // Close connection
  console.log('Closing connection.')
  if (this.connectionPool.connected) {
    this.connectionPool.close()
  }
})
If stmt.prepare supports promises out of the box, you can return it directly without wrapping it in a new Promise.
All-promise version:
this.connectionPool.connect().then(pool => {
  // Create prepared statement
  stmt = new mssql.PreparedStatement(pool)
  return stmt.prepare(command)
    .then(() => stmt.execute())
    .then((result) => {
      console.log(`Rows affected: ${stmt.rowsAffected}`)
      console.log(`Result: ${result}`)
    })
    .finally(() => stmt.unprepare())
}).catch(err => {
  console.log(`Connection pool error: ${err}`)
}).finally(() => {
  // Close connection
  console.log('Closing connection.')
  if (this.connectionPool.connected) {
    this.connectionPool.close()
  }
})
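As a side note, prepared statements are usually parameterised. With the same promise style, inputs are declared before prepare and the values supplied at execute time. The @id parameter, users table and the value 42 below are made-up placeholders, so treat this as a sketch of the mssql API rather than a drop-in:
this.connectionPool.connect().then(pool => {
  const stmt = new mssql.PreparedStatement(pool)
  stmt.input('id', mssql.Int) // declare the parameter type before preparing
  return stmt.prepare('SELECT * FROM users WHERE id = @id')
    .then(() => stmt.execute({ id: 42 })) // supply the value when executing
    .finally(() => stmt.unprepare())
}).then(result => {
  console.log(`Result: ${result}`)
}).catch(err => {
  console.log(`Connection pool error: ${err}`)
}).finally(() => {
  if (this.connectionPool.connected) {
    this.connectionPool.close()
  }
})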
The .prepare(), .execute() and .unprepare() methods all accept callbacks. Unless these methods are "promisified", the this.connectionPool.connect().then().catch().finally() chain has no means of being informed of the outcome of the prepare-execute-unprepare process.
The prepare-execute-unprepare process can be promisified as follows:
this.connectionPool.connect()
  .then(pool => {
    let stmt = new mssql.PreparedStatement(pool);
    return new Promise((resolve, reject) => {
      stmt.prepare(command, err => {
        if (err) {
          reject(err);
        } else {
          stmt.execute((err, result) => {
            if (err) {
              reject(err);
            } else {
              stmt.unprepare(err => {
                if (err) {
                  reject(err);
                } else {
                  resolve(result);
                }
              });
            }
          });
        }
      });
    });
  })
  .catch(err => {
    console.log(`Prepared statement error: ${err.message}`);
  })
  .finally(() => {
    if (this.connectionPool.connected) {
      this.connectionPool.close();
    }
  });
Now, the catch() and finally() blocks will be sequenced as you expect.
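For comparison, here is the same sequencing written with async/await. This is only a sketch: it assumes the promise-returning variants of prepare/execute/unprepare used in the earlier answer, and command and connectionPool are the same objects as in the question:
async function runPreparedStatement(connectionPool, command) {
  const pool = await connectionPool.connect()
  const stmt = new mssql.PreparedStatement(pool)
  try {
    await stmt.prepare(command)
    try {
      const result = await stmt.execute()
      console.log(`Rows affected: ${stmt.rowsAffected}`)
      return result
    } finally {
      // unprepare runs whether execute succeeded or threw
      await stmt.unprepare()
    }
  } finally {
    // the connection is only closed after the statement work has finished
    if (connectionPool.connected) {
      connectionPool.close()
    }
  }
}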
Using mysql2 with promises, I have the following controller.js:
exports.company_add = (req, res) => {
  company_model.company_add(admin_email, admin_info).then((result) => { ... })
    .catch((err) => { ... })
}
And the following model.js:
exports.company_add = (admin_email, admin_info) => {
  return new Promise((resolve, reject) => {
    connectionPool.getConnection((connectionError, connection) => {
      if (connectionError) reject(connectionError);
      return connection.promise().query('SELECT * FROM admin WHERE admin_email = ?', [admin_email])
        .then(([rows, field]) => {
          if (rows.length) reject('Email exist');
          else return connection.promise().query('INSERT INTO companydb.admin SET ?', [admin_info])
        })
        .then((result) => {
          console.log('result')
          if (result[0].affectedRows === 1) resolve(result);
          else reject('INSERT FAIL');
        })
        .catch((err) => {
          reject(err);
        })
        .finally(() => {
          connection.release();
        })
    });
  });
};
I wonder how to escape from the .then() chain when the if (rows.length) condition is hit, since there is no need to continue with the next .then() if the email already exists in the database. Do I need to throw and add a .catch() just before the next .then() executes?
One cannot really break out of a then chain other than by nesting or throwing an exception. Yes, in this case you probably should throw, but you don't need to .catch it when you avoid the Promise constructor antipattern and just make it a chained promise:
exports.company_add = (admin_email, admin_info) => {
  return connectionPool.getConnection().then(connection => {
    return connection.promise().query('SELECT * FROM admin WHERE admin_email = ?', [admin_email])
      .then(([rows, field]) => {
        if (rows.length)
          throw new Error('Email exist');
        else
          return connection.promise().query('INSERT INTO companydb.admin SET ?', [admin_info])
      })
      .then((result) => {
        console.log('result')
        if (result[0].affectedRows !== 1)
          throw new Error('INSERT FAIL');
        return result;
      })
      .finally(() => {
        connection.release();
      })
  }, err => {
    console.error(err);
    throw new Error('CONNECTION FAIL');
  });
};
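With the model written this way, the controller from the question needs no changes: any throw in the chain ('Email exist', 'INSERT FAIL', 'CONNECTION FAIL') simply lands in its .catch. A minimal usage sketch (the response handling below is illustrative, not from the question):
exports.company_add = (req, res) => {
  company_model.company_add(admin_email, admin_info)
    .then((result) => {
      res.json({ inserted: result[0].affectedRows }) // illustrative success response
    })
    .catch((err) => {
      // 'Email exist', 'INSERT FAIL' and 'CONNECTION FAIL' all arrive here
      res.status(400).json({ error: err.message })
    })
}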
Here is my code that I'm trying to implement using node-postgres:
return pool.connect().then((client) => {
  // if (err) {
  //   throw err;
  // }
  let updateTable = [];
  console.log('ABOUT TO RUN QUERY');
  // updateTable.executeQuery()
  return client.query(sqlString, updateTable, (error, result) => {
    console.log('RUNNING QUERY');
    if (error) {
      throw error;
    }
    console.log('RUNNING QUERY2');
    // code
    console.log('RUNNING QUERY3');
    for (let i = 0; i < result.rows.length; i++) {
      console.log('RUNNING QUERY4');
      let row = result.rows[i];
      // process data
    }
    console.log('RUNNING QUERY5');
    // write to file
    console.log('RUNNING QUERY6');
    return client.release();
  })
    .then(() => {
      console.log('CLIENT RELEASED');
      if (!fileOnly) {
        if (optionNode != null) {
          console.log('ABOUT TO RUN QUERY #2');
          // statsTable.executeQuery()
          let statResults = client.query(withStatsString, statsTable, (err, res) => {
            if (err) {
              console.log(err);
              return err;
            }
            return client.release();
          });
          //}
        }
      }
      return pool.end();
    })
    .then(() => {
      return reportObject;
    })
    .catch(e => {
      throw e;
    });
})
  .catch((e) => {
    console.log(e);
    return reportObject;
  });
When I run this code, I can see:
RUNNING QUERY
RUNNING QUERY2
RUNNING QUERY3
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY4
RUNNING QUERY5
RUNNING QUERY6
However, it never reaches the .then() where the client is released. It prints everything right up to where the Promise should finish, but then hangs indefinitely and never resolves. How do I fix my Promise chain?
EDIT: I was able to fix the client.query call, but now my program hangs on completion in index.js. This is the code:
ReportUtil.sendInReport('Monthly_2017_01', JSON.parse(reportRequest), 100, null)
  .then(result => {
    console.log(result.status);
    console.log(result.header);
    console.log(result.data);
    return Promise.resolve();
  }).finally(() => {});
After this code, it just hangs and the program never ends. How do I escape this Promise chain?
As per the docs on the release callback:
You must call the releaseCallback or client.release (which points to the releaseCallback) when you are finished with a client. If you forget to release the client then your application will quickly exhaust available, idle clients in the pool and all further calls to pool.connect will timeout with an error or hang indefinitely if you have connectionTimeoutMillis configured to 0.
Try not returning client.release() (as if it were a promise, which it is not) and instead call it lower in the promise chain, like this:
return pool.connect().then((client) => {
  let updateTable = [];
  console.log('ABOUT TO RUN QUERY');
  // updateTable.executeQuery()
  return client.query(sqlString, updateTable)
    .then(result => {
      console.log('RUNNING QUERY');
      console.log('RUNNING QUERY2');
      // code
      console.log('RUNNING QUERY3');
      for (let i = 0; i < result.rows.length; i++) {
        console.log('RUNNING QUERY4');
        let row = result.rows[i];
        // process data
      }
      console.log('RUNNING QUERY5');
      // write to file
      console.log('RUNNING QUERY6');
    })
    .then(() => {
      if (!fileOnly) {
        if (optionNode != null) {
          console.log('ABOUT TO RUN QUERY #2');
          // statsTable.executeQuery()
          let statResults = client.query(withStatsString, statsTable)
            .then(res => {
            });
        }
      }
      client.release();
      console.log('CLIENT RELEASED');
      return pool.end();
    })
    .then(() => {
      return reportObject;
    })
    .catch(e => {
      client.release();
      throw e;
    });
})
  .catch((e) => {
    console.log(e);
    return reportObject;
  });
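If your Node version supports async/await, the same flow reads more naturally with try/finally around the client, which makes it harder to forget the release. This is only a sketch under the same assumptions as above (sqlString, updateTable, withStatsString, statsTable, fileOnly, optionNode and reportObject come from the surrounding code; pg's query returns a promise when no callback is passed):
async function runReport() {
  const client = await pool.connect();
  try {
    console.log('ABOUT TO RUN QUERY');
    const result = await client.query(sqlString, updateTable);
    for (const row of result.rows) {
      // process data
    }
    // write to file
    if (!fileOnly && optionNode != null) {
      console.log('ABOUT TO RUN QUERY #2');
      await client.query(withStatsString, statsTable);
    }
  } finally {
    client.release(); // always hand the client back to the pool
  }
  await pool.end();
  return reportObject;
}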
And for exiting your index.js using a Promise chain:
ReportUtil.sendInReport('Monthly_2017_01', JSON.parse(reportRequest), 100, null)
  .then(result => {
    console.log(result.status);
    console.log(result.header);
    console.log(result.data);
  })
  .catch(e => {
    console.log(e);
  })
  .finally(() => {
    process.exit();
  });
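One caveat I'd add here (not from the original answer): process.exit() terminates the process immediately and can cut off output that has not been flushed yet. If the only goal is to flag success or failure, setting process.exitCode and letting Node exit on its own is gentler, provided whatever was keeping the event loop alive (for example an open pool) has also been closed:
ReportUtil.sendInReport('Monthly_2017_01', JSON.parse(reportRequest), 100, null)
  .then(result => {
    console.log(result.status);
    console.log(result.header);
    console.log(result.data);
  })
  .catch(e => {
    console.log(e);
    process.exitCode = 1; // mark failure but let pending I/O finish
  });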
I created a promise function to process a long-running query task. Sometimes the task will block for hours. I want to set a timeout to stop the task. Below is the code.
It returns the error message correctly, but it keeps running connection.execute() for a long time before stopping. So how can I stop it immediately when it rejects?
Thanks!
function executeQuery(connection, query) {
  return new Promise((resolve, reject) => {
    "use strict";
    //long time query
    connection.execute(query, function (err, results) {
      if (err) reject('Error when fetch data');
      else resolve(results);
      clearTimeout(t);
    });
    let t = setTimeout(function () {
      reject('Time Out');
    }, 10);
  });
}

(async () => {
  "use strict";
  oracle.outFormat = oracle.OBJECT;
  try {
    let query = fs.readFileSync("query.sql").toString();
    let results = await executeQuery(connection, query);
    console.log(results.rows);
  } catch (e) {
    console.log(`error:${e}`);
  }
})();
So how can I stop it immediately when it rejects?
According to the docs, you can use connection.break:
return new Promise((resolve, reject) => {
  connection.execute(query, (err, results) => {
    if (err) reject(err);
    else resolve(results);
    clearTimeout(t);
  });
  const t = setTimeout(() => {
    connection.break(reject); // is supposed to call the execute callback with an error
  }, 10);
})
Make sure to also release the connection in a finally block.
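For example, something along these lines (a sketch only, assuming the node-oracledb promise API and a Node version with Promise.prototype.finally; executeQuery is the wrapper from the question):
executeQuery(connection, query)
  .then(results => {
    console.log(results.rows);
  })
  .catch(err => {
    console.log(`error:${err}`);
  })
  .finally(() => {
    // release the connection whether the query finished, failed or was broken
    return connection.close();
  });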
Try this (using bluebird promises):
var execute = Promise.promisify(connection.execute);

function executeQuery(connection, query) {
  return execute.call(connection, query)
    .timeout(10000)
    .then(function (results) {
      // handle results here
    })
    .catch(Promise.TimeoutError, function (err) {
      // handle timeout error here
    })
    .catch(function (err) {
      // handle other errors here
    });
}
If this still blocks, there's a possibility that the database driver you are using is actually synchronous rather than asynchronous. In that case, that driver would be incompatible with the node event loop and you may want to look into another one.
As Bergi mentioned, you'll need to use the connection.break method.
Given the following function:
create or replace function wait_for_seconds(
  p_seconds in number
)
return number
is
begin
  dbms_lock.sleep(p_seconds);
  return 1;
end;
Here's an example of its use:
const oracledb = require('oracledb');
const config = require('./dbConfig.js');

let conn;
let err;
let timeout;

oracledb.getConnection(config)
  .then((c) => {
    conn = c;
    timeout = setTimeout(() => {
      console.log('Timeout expired, invoking break');
      conn.break((err) => {
        console.log('Break finished', err);
      });
    }, 5000);
    return conn.execute(
      `select wait_for_seconds(10)
       from dual`,
      [],
      {
        outFormat: oracledb.OBJECT
      }
    );
  })
  .then(result => {
    console.log(result.rows);
    clearTimeout(timeout);
  })
  .catch(err => {
    console.log('Error in processing', err);
    if (/^Error: ORA-01013/.test(err)) {
      console.log('The error was related to the timeout');
    }
  })
  .then(() => {
    if (conn) { // conn assignment worked, need to close
      return conn.close();
    }
  })
  .catch(err => {
    console.log('Error during close', err)
  });
Keep in mind that the setTimeout call is just before the execute (because of the return statement). That timeout will start counting down immediately. However, the execute call isn't guaranteed to start immediately as it uses a thread from the thread pool and it may have to wait till one is available. Just something to keep in mind...
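One more detail worth flagging (my addition, using the same variables as the example above): the timeout is only cleared on success, so a query that errors out would still leave the pending break() to fire later against an idle connection. Attaching the clearTimeout to the execute promise itself covers both outcomes:
// inside the .then((c) => { ... }) callback above, instead of returning conn.execute(...) directly
return conn.execute(
  `select wait_for_seconds(10)
   from dual`,
  [],
  { outFormat: oracledb.OBJECT }
).then(
  result => { clearTimeout(timeout); return result; }, // cleared on success
  err => { clearTimeout(timeout); throw err; }          // cleared on error too
);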
I am trying to download tracks via the SoundCloud API and then launch a callback once an indeterminate number of tracks has been downloaded. When I run the code below, I see "All done" logged to the console before anything else, even though I intend for it to be the last thing... What am I doing wrong?
// Deps
import fs from 'fs'
import SC from 'node-soundcloud'
import request from 'request'

// Write mp3 function
function writeMP3(track) {
  return new Promise((resolve, reject) => {
    console.log('Starting download: ', track.title)
    request.get(track.download_url)
      .on('error', err => {
        // reject('Download error: ', err)
      })
      .on('finish', () => {
        () => resolve('Download complete')
      })
      .pipe(fs.createWriteStream(`./data/temp/${track.title}_${track.user.username}.mp3`))
  })
}

async function asyncTrackFetch(track) {
  return await writeMP3(track)
}

// Array of promises to callback upon
const trackActions = []

SC.init({
  id: 'MY_ID',
  secret: 'MY_SECRET'
})

SC.get('/tracks', (err, tracks) => {
  if (err) {
    throw new Error(err)
  } else {
    console.log('Tracks fetched: ', tracks.length)
    tracks.map(track => {
      if (track.downloadable) {
        console.log('downloadable')
        trackActions.push(asyncTrackFetch(track))
      }
    })
  }
})

// Perform requests async
Promise.all(trackActions).then(() => {
  console.log('All done')
  console.log(fs.readdirSync('./data/temp'))
})
Promise.all(trackActions) waits on whatever promises are in trackActions, but trackActions is empty at the time you make the call. You're only adding promises to the array after your SC.get callback gets called.
Try putting your Promise.all... block inside the SC.get callback like this:
SC.get('/tracks', (err, tracks) => {
  if (err) {
    throw new Error(err)
  } else {
    console.log('Tracks fetched: ', tracks.length)
    tracks.map(track => {
      if (track.downloadable) {
        console.log('downloadable')
        trackActions.push(asyncTrackFetch(track))
      }
    })
    Promise.all(trackActions).then(() => {
      console.log('All done')
      console.log(fs.readdirSync('./data/temp'))
    })
  }
})
It's worth mentioning as well that your line throw new Error(err) will crash the program since there's nowhere for that error to be caught.
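A minimal way to avoid that crash is to log and bail out of the callback instead of throwing (illustrative only; you may want proper error handling here):
SC.get('/tracks', (err, tracks) => {
  if (err) {
    console.error('Failed to fetch tracks:', err) // log rather than throw from the callback
    return
  }
  // ... continue with the downloadable tracks as above
})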
As Antonio Val mentioned, there are better ways to do this. If you promisify the node-soundcloud library then the last part of your code could look like this:
SC.get('/tracks').then(tracks => {
  // No need for trackedActions array.
  return Promise.all(tracks.filter(track => track.downloadable)
    .map(track => asyncTrackFetch(track)))
}).then(fetchedTracks => {
  console.log('All done fetching tracks', fetchedTracks)
}).catch(err => {
  // Handle error.
})
Or inside an async function,
try {
  const tracks = await SC.get('/tracks')
  const fetchPromises = tracks
    .filter(track => track.downloadable)
    .map(track => asyncTrackFetch(track))
  const fetchedTracks = await Promise.all(fetchPromises)
  console.log('All done fetching tracks.', fetchedTracks)
} catch (err) {
  // Handle error
}
I think the easiest way would be to move the Promise.all call to after the tracks.map loop has finished.
A more elegant solution would be to promisify SC.get as well and use async/await throughout your code.
UPDATE:
Couldn't test it so not sure if it works, but it would be something like this:
import fs from 'fs'
import SC from 'node-soundcloud'
import request from 'request'

function writeMP3(track) {
  return new Promise((resolve, reject) => {
    console.log('Starting download: ', track.title)
    request.get(track.download_url)
      .on('error', err => {
        // reject('Download error: ', err)
      })
      .on('finish', () => {
        () => resolve('Download complete')
      })
      .pipe(fs.createWriteStream(`./data/temp/${track.title}_${track.user.username}.mp3`))
  })
}

function getTracks() {
  return new Promise((resolve, reject) => {
    SC.get('/tracks', (err, tracks) => {
      if (err) {
        return reject(err)
      }
      console.log('Tracks fetched: ', tracks.length)
      resolve(tracks)
    })
  })
}

SC.init({
  id: 'MY_ID',
  secret: 'MY_SECRET'
})
With async/await:
async function start() {
  const tracks = await getTracks();
  for (let track of tracks) {
    await writeMP3(track)
  }
}

start()
  .then(() => {
    console.log('All done')
    console.log(fs.readdirSync('./data/temp'))
  })
  .catch((err) => {
    // insert error handler here
  })
If you just want to use Promises:
getTracks()
  .then((tracks) => {
    const promiseArray = tracks.map((track) => {
      return writeMP3(track)
    })
    return Promise.all(promiseArray)
  })
  .then(() => {
    console.log('All done')
    console.log(fs.readdirSync('./data/temp'))
  })
  .catch((err) => {
    // insert error handler here
  })
I'd like to aggregate data from MongoDB in NodeJS with promisified functions.
My script and dump are here: https://github.com/network-spy/lego
A short description: there are 2 collections in the database, "zip" and "restaurants". "zip" contains zip codes of locations, and "restaurants" contains information about restaurants with zip codes. The script should create a new collection "stat" and fill it with documents like:
{"zip_code" : "01002", "restaurants" : [ list of restaurants ] }
The problem is that the "zip" collection contains 29353 documents, but after the script runs I get a "stat" collection with only 29026 documents (the count can vary between runs).
I guess it's because of broken synchronization somewhere in my JS code. Could you please look at my code and advise how to fix it?
const MongoClient = require('mongodb').MongoClient;
const mongoDbUrl = 'mongodb://127.0.0.1:27017/world';

MongoClient.connect(mongoDbUrl, function(err, db) {
  if (err) {
    console.log(err);
    return;
  }
  console.log("Connected to server.");
  clearStat(db).then(
    result => {
      console.log(result);
      processZips(db).then(
        result => {
          console.log(result);
          closeMongoDBConnection(db);
        },
        error => {
          console.log(error);
          closeMongoDBConnection(db);
        }
      );
    },
    error => {
      console.log(error);
      closeMongoDBConnection(db);
    }
  );
});

let closeMongoDBConnection = (db) => {
  db.close();
  console.log("Disconnected from server.");
};

let clearStat = (db) => {
  return new Promise((resolve, reject) => {
    db.collection('stat').deleteMany({}, function(err, results) {
      if (err) {
        reject(err);
      }
      resolve('Stat data cleared');
    });
  });
};

let processZips = (db) => {
  return new Promise((resolve, reject) => {
    db.collection('zip').find({}, {"_id":1}).each((err, zipCode) => {
      if (zipCode == null) {
        resolve('Zips precessed');
      } else if (err) {
        reject(err);
      } else {
        findRestaurantsByZip(db, zipCode._id).then(
          result => {
            insertToStat(db, zipCode._id, result).then(
              result => {
                console.log('Inserted: ');
                console.dir(result);
              },
              error => {
                reject(error);
              }
            );
          },
          error => {
            reject(error);
          }
        );
      }
    });
  });
};

let findRestaurantsByZip = (db, zipCode) => {
  return new Promise((resolve, reject) => {
    db.collection('restaurant').find({"address.zipcode": zipCode}).toArray((err, restaurants) => {
      if (err) {
        reject(err);
      }
      resolve(restaurants);
    });
  });
};

let insertToStat = (db, zip, restaurants) => {
  return new Promise((resolve, reject) => {
    let statDocument = {};
    statDocument.zip_code = zip;
    statDocument.restaurants = restaurants;
    db.collection('stat').insertOne(statDocument).then(
      result => {
        resolve(statDocument);
      },
      error => {
        reject(error);
      }
    );
  });
};
Firstly, a simplification of your processZips function. This is functionally identical to your code but uses Promise chaining rather than nested Promises:
let processZips = (db) => new Promise((resolve, reject) =>
  db.collection('zip').find({}, {"_id":1}).each((err, zipCode) => {
    if (zipCode == null) {
      resolve('Zips precessed');
    } else if (err) {
      reject(err);
    } else {
      findRestaurantsByZip(db, zipCode._id)
        .then(result => insertToStat(db, zipCode._id, result))
        .then(result => console.log('Inserted: ', result))
        .catch(error => reject(error));
    }
  })
);
The problem may be (I can't test anything) that you resolve the processZips promise at the end of the .each processing. This "triggers" the .then that closes the database. However, due to the asynchronous find/insert code, it may well be that some of that work is still "in progress" at the time. I don't profess to know MongoDB well, so I don't know what closing the db while processing is still active would do - it seems likely that's the reason your output data is "short".
So, there are two ways to approach this:
1 - process each zipCode in series, i.e. each find/insert waits for the previous one to complete, and resolve when the last zipCode is done:
let processZips = (db) => {
  // set p to a resolved Promise so the first find/insert will kick off
  let p = Promise.resolve();
  return new Promise((resolve, reject) =>
    db.collection('zip').find({}, {"_id":1}).each((err, zipCode) => {
      if (zipCode == null) {
        // wait for last insert to complete before resolving the Promise
        resolve(p.then(() => resolve('Zips precessed'))); // see note 1, 2
      } else if (err) {
        reject(err);
      } else {
        // wait for previous insert to complete before starting new find/insert
        p = p
          .then(() => findRestaurantsByZip(db, zipCode._id))
          .then(result => insertToStat(db, zipCode._id, result))
          .then(result => console.log('Inserted: ', result)); // see note 1
      }
    })
  );
};
With this code, as soon as a find/insert rejects, no further find/insert operations will actually be performed.
2 - process each zipCode in "parallel", i.e. kick off all the find/insert operations and resolve when all zipCodes are done:
let processZips = (db) => {
  // create an array for all the find/insert Promises
  let p = [];
  return new Promise((resolve, reject) =>
    db.collection('zip').find({}, {"_id":1}).each((err, zipCode) => {
      if (zipCode == null) {
        // wait for all find/insert to complete before resolving this Promise
        resolve(Promise.all(p).then(() => 'Zips precessed')); // see note 1, 2
      } else if (err) {
        reject(err);
      } else {
        p.push(findRestaurantsByZip(db, zipCode._id)
          .then(result => insertToStat(db, zipCode._id, result))
          .then(result => console.log('Inserted: ', result))
        ); // see note 1
      }
    })
  );
};
The one caveat with the second method is that, as in your original code, if one of the find/insert operations fails, that won't stop the remaining find/insert operations from running; see the Promise.allSettled sketch below for a way to at least surface which ones failed.
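Not part of the original answer, but if you are on Node 12.9 or later you could swap Promise.all for Promise.allSettled in the zipCode == null branch above, so every outcome is collected instead of the promise rejecting on the first failure. A sketch (p and resolve come from the surrounding function in method 2):
// replaces the zipCode == null branch of method 2 above
resolve(
  Promise.allSettled(p).then(results => {
    const failures = results.filter(r => r.status === 'rejected');
    failures.forEach(f => console.error('find/insert failed:', f.reason));
    return 'Zips precessed';
  })
);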
You'll notice that there seems to be a lack of error handling compared to your original code. This code relies on two "features" of promises:
rejections will "flow through" the promise chain,
if you resolve a promise with a rejected promise, it is identical to rejecting the promise.
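The second point is easy to verify in isolation (a standalone snippet, unrelated to MongoDB):
// Resolving the outer promise with an already-rejected promise
// rejects the outer promise with the same reason.
const outer = new Promise(resolve => {
  resolve(Promise.reject(new Error('boom')));
});

outer.catch(err => console.log(err.message)); // logs "boom"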