How to get the mysql command from mysql2/promise in node js - node.js

For the code below how can I get the full query Like
INSERT INTO TABLE(.........) VALUES(........);
I can create the function to create command but it would look bad So Is there way of getting the query used here
I have used mysql2/promise
// Parameterized INSERT; mysql2 expands the single `?` from userArr.
let command = "INSERT INTO users(joined,name,username,profile,DOB,facebookLink,twitterLink,instagramLink,interests,bio) VALUES (?) ;";
try {
// `connection` is a Promise of a connection, hence the double await.
let userPosted = await (await connection).query(command, userArr);
if (userPosted[0].affectedRows === 0) {
return errors.request.NOT_FOUND;
}
} catch (err) {
// Logs the raw SQL template (placeholders NOT expanded) — which is
// exactly what the question is asking how to avoid.
console.log(command);
console.log(err);
return false;
}
/*
Suppose for small command like
let command1 = "INSERT INTO tableA(a,b) VALUES(?);"
let tableposted = await (await connection).query(command, [1,2]);
Here I want to get the command used in upper query ↑↑↑↑↑↑
Like INSERT INTO tableA(a,b) VALUES(1,2);
*/

Related

Correct way to organise this process in Node

I need some advice on how to structure this function as at the moment it is not happening in the correct order due to node being asynchronous.
This is the flow I want to achieve; I don't need help with the code itself but with the order to achieve the end results and any suggestions on how to make it efficient
Node routes a GET request to my controller.
Controller reads a .csv file on local system and opens a read stream using fs module
Then use csv-parse module to convert that to an array line by line (many 100,000's of lines)
Start a try/catch block
With the current row from the csv, take a value and try to find it in a MongoDB
If found, take the ID and store the line from the CSV and this id as a foreign ID in a separate database
If not found, create an entry into the DB and take the new ID and then do 6.
Print out to terminal the row number being worked on (ideally at some point I would like to be able to send this value to the page and have it update like a progress bar as the rows are completed)
Here is a small part of the code structure that I am currently using;
const fs = require('fs');
const parse = require('csv-parse');
// Persist a new InstanceOne document; resolves with the saved model.
function addDataOne(req, id) {
  const instance = new InstanceOne({ ...code });
  return instance.save();
}
// Persist a new InstanceTwo document; resolves with the saved model.
function addDataTwo(req, id) {
  const instance = new InstanceTwo({ ...code });
  return instance.save();
}
// Controller: stream-parse a CSV named by ?file= and, per row, look up a
// ModelOne record and cross-link new documents. NOTE(review): the 'data'
// handler fires async DB work without awaiting it, so 'end' (and the
// response) can arrive before rows are actually saved — this is the
// ordering problem described in the question.
exports.add_data = (req, res) => {
const fileSys = 'public/data/';
const parsedData = [];
let i = 0;
fs.createReadStream(`${fileSys}${req.query.file}`)
.pipe(parse({}))
.on('data', (dataRow) => {
// Map positional CSV columns onto named fields ("etc" is placeholder
// pseudo-code from the question, not runnable as-is).
let RowObj = {
one: dataRow[0],
two: dataRow[1],
three: dataRow[2],
etc,
etc
};
// NOTE(review): try/catch here only catches synchronous throws; errors
// inside the .exec callback are handled via its `err` argument instead.
try {
ModelOne.find(
{ propertyone: RowObj.one, propertytwo: RowObj.two },
'_id, foreign_id'
).exec((err, searchProp) => {
if (err) {
console.log(err);
} else {
if (searchProp.length > 1) {
console.log('too many returned from find function');
}
// Exactly one match: attach the new document to the existing record.
if (searchProp.length === 1) {
addDataOne(RowObj, searchProp[0]).then((result) => {
searchProp[0].foreign_id.push(result._id);
searchProp[0].save();
});
}
// No match: create the parent record first, then the child.
if (searchProp.length === 0) {
let resultAddProp = null;
addDataTwo(RowObj).then((result) => {
resultAddProp = result;
addDataOne(req, resultAddProp._id).then((result) => {
resultAddProp.foreign_id.push(result._id);
resultAddProp.save();
});
});
}
}
});
} catch (error) {
console.log(error);
}
// Progress counter rewritten in place on the terminal each row.
i++;
let iString = i.toString();
process.stdout.clearLine();
process.stdout.cursorTo(0);
process.stdout.write(iString);
})
.on('end', () => {
// NOTE(review): fires when the stream ends, not when the DB writes do.
res.send('added');
});
};
I have tried to make the functions use async/await but it seems to conflict with the fs.openReadStream or csv parse functionality, probably due to my inexperience and lack of correct use of code...
I appreciate that this is a long question about the fundamentals of the code but just some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one at a time via a post request from postman but can't implement the next stage which is to read from the csv file which contains many records
First of all you can make the following checks into one query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use upsert option in mongodb findOneAndUpdate query to update or upsert.
Secondly don't do this in main thread. Use a queue mechanism it will be much more efficient.
Queue which I personally use is Bull Queue.
https://github.com/OptimalBits/bull#basic-usage
This also provides the functionality you need of showing progress.
Also regarding using Async Await with ReadStream, a lot of example can be found on net such as : https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/

UI freezes for a short moment while trying to execute multiple commands in a gnome shell extension

Original question: Multiple arguments in Gio.Subprocess
So currently I'm trying to execute multiple asynchronous commands in my gnome-shell-extension via Gio.Subprocess. This works fine, if I put all commands as only one chained command with && in the command vector of the Subprocess. The drawback of this solution is, that the output of the different chained commands is only updated once and the execution time may be long.
What I'm now trying to do, is to execute every command on its own at the same time. Now the output can be updated if one command only has a small interval while another one needs more time.
Let's say these are my commands, in this case I would like to execute each command every second:
let commands = {"commands":[{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1}]}
Then I'm calling my refresh function for each command.
commands.commands.forEach(command => {
this.refresh(command);
})
What is happening now, is that the gnome UI is freezing almost every second, not much, but I can see my mouse cursor or scrolling stop for a very short amount of time, even though I use asynchronous communication.
What I have found out from debugging is that it seems to be the initialization of the Subprocess which causes the small freeze, maybe because all the commands are using it nearly at the same time?
proc.init(cancellable);
I think the documentation says that the init method is synchronous (https://developer.gnome.org/gio//2.56/GInitable.html#g-initable-init) and that there also seems to be an async version (https://developer.gnome.org/gio//2.56/GAsyncInitable.html#g-async-initable-init-async), but the Gio.Subprocess does only implement the synchronous one (https://developer.gnome.org/gio//2.56/GSubprocess.html)
So the final question is, what would be the correct way to avoid the freezing? I tried to move the init part to asynchronous function and continue with the command execution via callbacks after it is done, but with no luck. Maybe this is even the completely wrong approach though.
Whole extension.js (final updating of the output is not part of this version, just for simplicity):
const Main = imports.ui.main;
const GLib = imports.gi.GLib;
const Mainloop = imports.mainloop;
const Gio = imports.gi.Gio;
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();
// UI widgets and state shared across enable()/disable()/refresh().
let output, box, gschema, stopped;
var settings;
// Demo configuration: seven identical `ls` commands, each re-run every second.
let commands = {"commands":[{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1}]}
function init() {
// No setup needed at load time; all initialization happens in enable().
}
// Called when the extension is enabled: build the panel widget and start
// one refresh loop per configured command.
function enable() {
stopped = false;
// Load this extension's GSettings schema from its own schemas/ directory.
gschema = Gio.SettingsSchemaSource.new_from_directory(
Me.dir.get_child('schemas').get_path(),
Gio.SettingsSchemaSource.get_default(),
false
);
settings = new Gio.Settings({
settings_schema: gschema.lookup('org.gnome.shell.extensions.executor', true)
});
// NOTE(review): `St` is used but not imported in this listing —
// presumably imports.gi.St elsewhere; confirm.
box = new St.BoxLayout({ style_class: 'panel-button' });
output = new St.Label();
box.add(output, {y_fill: false, y_align: St.Align.MIDDLE});
Main.panel._rightBox.insert_child_at_index(box, 0);
// Kick off an independent refresh loop for each command.
commands.commands.forEach(command => {
this.refresh(command);
})
}
// Called when the extension is disabled: flag the loops to stop on their
// next tick and remove the panel widget.
function disable() {
stopped = true;
log("Executor stopped");
Main.panel._rightBox.remove_child(box);
}
// Run one GUI update, then schedule the next run `command.interval`
// seconds later. The timeout callback returns undefined (falsy), so each
// source fires once; the loop continues via the recursive refresh() call.
async function refresh(command) {
await this.updateGui(command);
Mainloop.timeout_add_seconds(command.interval, () => {
if (!stopped) {
this.refresh(command);
}
});
}
/**
 * Execute `command.command` in a shell and log its output flattened to a
 * single line (each newline becomes a trailing space, as before).
 *
 * FIX: the original mixed `await` with `.then()` on the same expression
 * (an anti-pattern) and abused `.map()` for side effects; this version is
 * behaviorally identical but uses plain await + a loop.
 */
async function updateGui(command) {
    const stdout = await execCommand(['/bin/sh', '-c', command.command]);

    if (stdout) {
        // Join the lines with a space separator (keeps the original's
        // trailing space per line).
        let outputAsOneLine = '';
        for (const line of stdout.split('\n')) {
            outputAsOneLine = outputAsOneLine + line + ' ';
        }

        if (!stopped) {
            log(outputAsOneLine);
            //output.set_text(outputAsOneLine);
        }
    }
}
/**
 * Run `argv` as a subprocess and resolve with its stdout (UTF-8 string).
 *
 * FIX: the original listing ended with a stray markdown fence fused to the
 * closing brace ("}```"), a scraping artifact that breaks the syntax.
 *
 * NOTE(review): errors are logged via logError() and swallowed, so callers
 * receive `undefined` on failure rather than a rejection — kept as-is.
 */
async function execCommand(argv, input = null, cancellable = null) {
    try {
        let flags = Gio.SubprocessFlags.STDOUT_PIPE;

        if (input !== null)
            flags |= Gio.SubprocessFlags.STDIN_PIPE;

        let proc = new Gio.Subprocess({
            argv: argv,
            flags: flags
        });
        // Gio.Subprocess implements GInitable only; the process starts here.
        proc.init(cancellable);

        // Wrap the callback-style communicate API in a Promise so the
        // caller can await the collected stdout.
        let stdout = await new Promise((resolve, reject) => {
            proc.communicate_utf8_async(input, cancellable, (proc, res) => {
                try {
                    let [ok, stdout, stderr] = proc.communicate_utf8_finish(res);
                    resolve(stdout);
                } catch (e) {
                    reject(e);
                }
            });
        });

        return stdout;
    } catch (e) {
        logError(e);
    }
}
It's doubtful that Gio.Initable.init() is what's causing the freeze. First some comments on the usage of GSubprocess here.
/* Spawn `argv` and return a Promise that resolves with its stdout, or
 * rejects on a non-zero exit status (message taken from stderr), on
 * cancellation, or on failure to start. */
function execCommand(argv, input = null, cancellable = null) {
try {
/* If you expect to get output from stderr, you need to open
* that pipe as well, otherwise you will just get `null`. */
let flags = (Gio.SubprocessFlags.STDOUT_PIPE |
Gio.SubprocessFlags.STDERR_PIPE);
if (input !== null)
flags |= Gio.SubprocessFlags.STDIN_PIPE;
/* Using `new` with an initable class like this is only really
* necessary if it's possible you might pass a pre-triggered
* cancellable, so you can call `init()` manually.
*
* Otherwise you can just use `Gio.Subprocess.new()` which will
* do exactly the same thing for you, just in a single call
* without a cancellable argument. */
let proc = new Gio.Subprocess({
argv: argv,
flags: flags
});
proc.init(cancellable);
/* If you want to actually quit the process when the cancellable
* is triggered, you need to connect to the `cancel` signal */
if (cancellable instanceof Gio.Cancellable)
cancellable.connect(() => proc.force_exit());
/* Remember the process start running as soon as we called
* `init()`, so this is just the threaded call to read the
* processes's output.
*/
return new Promise((resolve, reject) => {
proc.communicate_utf8_async(input, cancellable, (proc, res) => {
try {
let [, stdout, stderr] = proc.communicate_utf8_finish(res);
/* If you do opt for stderr output, you might as
* well use it for more informative errors */
if (!proc.get_successful()) {
let status = proc.get_exit_status();
throw new Gio.IOErrorEnum({
code: Gio.io_error_from_errno(status),
message: stderr ? stderr.trim() : GLib.strerror(status)
});
}
resolve(stdout);
} catch (e) {
reject(e);
}
});
});
/* This should only happen if you passed a pre-triggered cancellable
* or the process legitimately failed to start (eg. commmand not found) */
} catch (e) {
return Promise.reject(e);
}
}
And notes on Promise/async usage:
/* Don't do this. You're effectively mixing two usage patterns
* of Promises, and still not catching errors. Expect this to
* blow up in your face long after you expect it to. */
// (Intentionally-wrong example — do not copy.)
async function foo() {
await execCommand(['ls']).then(stdout => log(stdout));
}
/* If you're using `await` in an `async` function that is
* intended to run by itself, you need to catch errors like
* regular synchronous code */
async function bar() {
try {
// The function will "await" the first Promise to
// resolve successfully before executing the second
await execCommand(['ls']);
await execCommand(['ls']);
} catch (e) {
logError(e);
}
}
/* If you're using Promises in the traditional manner, you
* must catch them that way as well */
function baz() {
// The function will NOT wait for the first to complete
// before starting the second. Since these are (basically)
// running in threads, they are truly running in parallel.
execCommand(['ls']).then(stdout => {
log(stdout);
}).catch(error => {
logError(error);
});
execCommand(['ls']).then(stdout => {
log(stdout);
}).catch(error => {
logError(error);
});
}
Now for the implementation:
const Main = imports.ui.main;
const GLib = imports.gi.GLib;
const Gio = imports.gi.Gio;
const ExtensionUtils = imports.misc.extensionUtils;
const Me = ExtensionUtils.getCurrentExtension();
let cancellable = null;
let panelBox = null;
let commands = {
"commands":[
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1},
{"command":"ls","interval":1}
]
};
/**
 * Enable the extension: lazily create the shared Gio.Cancellable, build
 * the panel UI, and start one refresh loop per configured command.
 *
 * FIX: the listing used object-method shorthand (`enable() {`), which is
 * a syntax error at the top level of a script — add the `function`
 * keyword, matching the other definitions in this file.
 */
function enable() {
    if (cancellable === null)
        cancellable = new Gio.Cancellable();

    panelBox = new St.BoxLayout({
        style_class: 'panel-button'
    });

    // Avoid deprecated methods like `add()`, and try not
    // to use global variable when possible
    let outputLabel = new St.Label({
        y_align: St.Align.MIDDLE,
        y_fill: false
    });

    panelBox.add_child(outputLabel);
    Main.panel._rightBox.insert_child_at_index(panelBox, 0);

    commands.commands.forEach(command => {
        this.refresh(command);
    });
}
/**
 * Disable the extension: cancel all in-flight subprocesses/timeouts via
 * the shared cancellable and tear down the panel UI.
 *
 * FIX: same syntax error as enable() — method shorthand at top level;
 * add the `function` keyword.
 */
function disable() {
    if (cancellable !== null) {
        cancellable.cancel();
        cancellable = null;
    }

    log("Executor stopped");

    if (panelBox !== null) {
        Main.panel._rightBox.remove_child(panelBox);
        panelBox = null;
    }
}
/**
 * Run one GUI update, then schedule the next run via a one-shot GLib
 * timeout; stops looping once the shared cancellable is triggered.
 *
 * FIX: the `if (!e.matches(...)` condition was missing its closing
 * parenthesis — a syntax error.
 */
async function refresh(command) {
    try {
        await this.updateGui(command);

        // Don't use MainLoop anymore, just use GLib directly
        GLib.timeout_add_seconds(0, command.interval, () => {
            if (cancellable && !cancellable.is_cancelled())
                this.refresh(command);

            // Always explicitly return false (or this constant)
            // unless you're storing the returned ID to remove the
            // source later.
            //
            // Returning true (GLib.SOURCE_CONTINUE) or a value that
            // evaluates to true will cause the source to loop. You
            // could refactor your code to take advantage of that
            // instead of constantly creating new timeouts each
            // second.
            return GLib.SOURCE_REMOVE;
        });
    } catch (e) {
        // We can skip logging cancelled errors, since we probably
        // did that on purpose if it happens
        if (!e.matches(Gio.IOErrorEnum, Gio.IOErrorEnum.CANCELLED))
            logError(e, 'Failed to refresh');
    }
}
// `updateGui()` is wrapped in a try...catch above so it's safe to
// skip that here.
/**
 * Execute the shell command and log its output with newlines removed.
 *
 * FIX: String.prototype.replace() with a plain-string pattern replaces
 * only the FIRST occurrence, so multi-line output kept its later
 * newlines; use a global regex to strip them all.
 */
async function updateGui(command) {
    let stdout = await execCommand(['/bin/sh', '-c', command.command]);

    // This will probably always be true if the above doesn't throw,
    // but you can check if you want to.
    if (stdout) {
        let outputAsOneLine = stdout.replace(/\n/g, '');

        // No need to check the cancellable here, if it's
        // triggered the command will fail and throw an error
        log(outputAsOneLine);

        // let outputLabel = panelBox.get_first_child();
        // outputLabel.set_text(outputAsOneLine);
    }
}
It's hard to say what is causing the freeze you are experiencing, but I would first cleanup your Promise usage and be more explicit about how you use timeout sources, as these may be stacking every second.
If possible, you might want to group your subprocesses into a single timeout source, possible using Promise.all() to await them all at once. Overloading the event loop with pending sources and Promises could also be the cause of the freeze.

Array as sql request parameter

I handle my SQL queries like this (which works):
const sql = require("mssql/msnodesqlv8");
// Single shared pool using Windows integrated auth; runSQLQuery() below
// connects and closes it around every query.
const conn = new sql.ConnectionPool({
database: "MyDatabase",
server: "localhost\\SQLEXPRESS",
driver: "msnodesqlv8",
options: {
trustedConnection: true
}
});
// Connect the shared pool, run `query` on the prepared request, and close
// the pool again. Returns the driver result object, or undefined on error.
// NOTE(review): errors are logged and swallowed — callers must check for
// an undefined return; opening/closing the pool per query also defeats
// pooling.
async function runSQLQuery(insertReq, query) {
try {
await conn.connect();
var result = await insertReq.query(query);
await conn.close();
return result;
} catch (ex) {
console.log(ex);
return undefined;
} finally {
// Safety net: close the pool if a throw skipped the close above.
if (conn.connected)
conn.close();
}
}
and create the querys like this (which also works):
// Fetch a single client row by its unique ID (undefined if none found).
// NOTE(review): mssql named parameters are normally written `@ID`; the
// `#ID` here looks like a transcription artifact of this page — confirm.
// ClientTabel is interpolated into the SQL and must never be user input.
exports.getClientByID = async function (ID) {
var insertReq = conn.request();
insertReq.input("ID", sql.UniqueIdentifier, ID);
const request = await runSQLQuery(insertReq, `SELECT TOP (1) * FROM ${ClientTabel} WHERE ID = #ID`);
return request.recordset[0]
};
But now I want to add an Array as Parameter like this (and this doesn't work):
// BROKEN (the subject of the question): SQL `IN` requires a parenthesized
// value list; a single NVarChar parameter cannot expand into one, hence
// the syntax error near the parameter. See the answer: build the list or
// use a table-valued parameter/function.
exports.getUsersWithProperty = async function (properties) {
var insertReq = conn.request();
insertReq.input("properties", sql.NVarChar, properties);
const request = await runSQLQuery(insertReq, `SELECT * FROM ${ClientTabel} WHERE Property IN #properties`);
return request.recordset;
};
But with this I only get a
Request Error: "Wrong Syntax near '#properties'".
I guess the type sql.NVarChar is wrong but I don't know what the right type is. Whats the solution for this?
OK, for a start, you need to add brackets around the values.
An IN clause is like this:
WHERE somecolumn IN ('value1','value2','value3')
you'll also have to make sure that after your #properties string replacement is done, you end up with a statement that looks like the clause above, with the quotes and commas in the right places.
Alternately, if #properties is a string like Value1,Value2,Value3 and so on, you could pass it to a T-SQL table-valued function that returns a table like this:
WHERE somecolumn IN dbo.ExtractStringList(#StringList)

recursive nodejs mysql query

I want to execute a recursive function that retrieve data from DB. In php the code below run like a charm with 15ms to execute
// Recursively collect every descendant category number of $catno into
// $subcats (passed by reference), optionally restricted to active rows.
// NOTE(review): $catno is interpolated directly into the SQL string —
// SQL injection risk unless callers guarantee it is a trusted value.
function GetSubCategories($catno,&$subcats, $useactive=true){
global $dbconn;
$qid = new SSQL($dbconn, "SELECT categoryno FROM article_category WHERE parent = '$catno'".($useactive?" AND active = 'Y'":"")." ORDER BY sortorder");
if ($qid->query()){
while($catrow=$qid->fetch_array()){
$subcats[]=$catrow["categoryno"];
GetSubCategories($catrow["categoryno"],$subcats, $useactive);
}
}
}
I'm a newbie in nodejs environment and Async cause trouble in this case.
If i write the same coe in js the program exit after first iteration. I can sync the process with await but execution time explode...
I try many thing with promise like
// BROKEN (question code): wraps an awaited value in `new Promise` (the
// explicit-construction anti-pattern), and the `return` inside the for
// loop exits after processing only the FIRST child row.
var getSubcategoriestest = function(categoryno,subcats, useactive=true){
return new Promise(async function (resolve) {
const query = `SELECT categoryno FROM article_category WHERE ?? = ? ${useactive?" AND active = 'Y'":""} ORDER BY sortorder`
let rows = await mysqlConn.query(query,['parent',categoryno])
resolve(rows)
}).then((rows)=>{
for (row of rows){
console.log(row.categoryno)
// NOTE(review): this `return` stops the loop at the first row.
return new Promise(async function (resolve) {
await getSubcategoriestest(row.categoryno,subcats, useactive)
resolve()
}).then(()=>{console.log('end')})
}
})
}
but nothing work fine
Any guru can help me ?
Thanks
Jeremy
I test this code
// BROKEN (question code): the recursive calls are never chained into the
// promise that Promise.all waits on, so completion fires after the first
// level; `row` is an implicit global leaked from the loop and is read
// after the loop ends (last row only); Promise.all also re-runs on every
// recursion level.
var getSubcategoriestest = async function(categoryno,subcats, useactive=true,arrPromise=[]){
let promise = new Promise(function (resolve,reject) {
const query = `SELECT categoryno FROM article_category WHERE ?? = ? ${useactive?" AND active = 'Y'":""} ORDER BY sortorder`
mysqlConn.query(query,['parent',categoryno]).then((rows)=>resolve(rows)).catch(err=>console.log(err))
}).then((rows)=>{
for (row of rows){
getSubcategoriestest(row.categoryno,subcats, useactive,arrPromise).then((rows)=>{subcats.push(row.categoryno)})
}
// NOTE(review): `row` here is whatever the loop left behind.
return row.categoryno
})
arrPromise.push(promise)
Promise.all(arrPromise).then(function() {
console.log("promise all,")
return
}).catch(err=>console.log(err))
}
but function end always after first iteration. Promise.all it's call many times (cause bind at each iteration i suppose)... headache,headache,headache
Here we go
// Recursively collect all descendant category numbers of `categoryno`
// into `subcats`. Resolves only after every branch of the tree has been
// walked, because each level returns Promise.all over its children.
var getSubcategoriestest = function (categoryno, subcats) {
    const query = `SELECT c FROM ac WHERE ?? = ? ORDER BY sortorder`;
    return mysqlConn.query(query, ['parent', categoryno]).then(rows => {
        // Each row becomes the promise of its own subtree.
        return Promise.all(rows.map(row => {
            subcats.push(row.categoryno);
            // FIX: the original passed an undefined `useactive` variable
            // here (not a parameter of this version), which threw a
            // ReferenceError as soon as a row was processed.
            return getSubcategoriestest(row.categoryno, subcats);
        }));
    });
};
rows.map make an array of promise cause getSubcategoriestest return a promise. You can add a then after promise.all.

I can't receive data from custom module in node.js

I wrote a module called accountManager.js
var sqlite3 = require('sqlite3');
var db = new sqlite3.Database("./users.db");
// BROKEN (question code): db.each is asynchronous, so the `return`
// statements inside its callback never become userExists' return value —
// the outer function always returns undefined. The string-concatenated
// nickName is also an SQL injection hole (both fixed in the answers below
// in the original page).
exports.userExists = function userExists(nickName) {
var stmt = 'SELECT * FROM users WHERE login="' + nickName + '"';
db.each(stmt,function(err,row) {
if(row) {
if(row.login==nickName) return true;
else return false;
}
});
}
In my main app.js file I've got
var accountManager = require('./lib/accountManager');
console.log(accountManager.userExists('user1'));
This app says 'undefined' in console... I checked that module is working fine, I guess it's problem with callback? Please, give me some help, I don't understand what is wrong with this code...
You need to understand how asynchronous functions and callbacks work.
Basically you cannot return anything inside the callback but need to invoke another callback which you pass to userExists.
var sqlite3 = require('sqlite3');
var db = new sqlite3.Database("./users.db");
// Callback version: the result is delivered via `cb` instead of `return`.
// NOTE(review): cb only fires when at least one row exists (never for a
// missing user), and fires once per row; the SQL injection via string
// concatenation also remains — intentional, as the answer text says.
exports.userExists = function userExists(nickName, cb) {
var stmt = 'SELECT * FROM users WHERE login="' + nickName + '"';
db.each(stmt,function(err,row) {
if(row) {
cb(row.login == nickName);
}
});
}
To use it:
accountManager.userExists('user1', function(found) {
console.log(found);
});
Besides that, your code has a gaping SQL injection hole and might not do what you intend to do. Here's a fixed version of the userExists function:
exports.userExists = function userExists(nickName, cb) {
var stmt = 'SELECT COUNT(*) AS cnt FROM users WHERE login = ?';
db.get(stmt, nickName, function(err, row) {
cb(row.cnt > 0);
});
};
Why is this better?
You do not interpolate the value in the SQL string (which is bad, you would have to escape stuff to avoid SQL injection). Passing it separately is much cleaner and better
You just want to know if a user exists. So retrieve the count (which will be exactly one row). If it's not zero the user exists.
Now the callback is always invoked. In the first example that is more closely based on your code it would only be invoked in case a user has been found - most likely not what you wanted.
You're returning a value from within the callback from db.each. However, this value is not returned by the outer function (userExists), which may return before the function passed to db.each is ever called.
You may want to provide a callback to the userExists function, like so:
exports.userExists = function (nickName, cb) {
var stmt = 'SELECT * FROM users WHERE login="' + nickName + '"';
var found=false;
db.each(stmt,function(err,row) {
if(row) {
if(row.login==nickName) {
found=true;
cb(true);
}
}
}, function () {
if (!found) {
cb(false);
}
});
}
Then, call it like:
var accountManager = require('./lib/accountManager');
accountManager.userExists('user1', function (found) {
console.log(found);
});

Resources