I'm new to Node.js, so it's very likely I misunderstand the concepts of "promise" and "callback hell". In any case, I need suggestions on how to avoid the following code:
var Sequelize = require('sequelize');
var DB = new Sequelize('project1db', 'john', 'password123', {
  host: 'localhost',
  dialect: 'mysql'
});
var DB_PREFIX = 't_';
DB.query(
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user` ( ' +
  '`user_id` int(11) UNSIGNED NOT NULL' +
  ') ENGINE=InnoDB DEFAULT CHARSET=utf8;', {type: DB.QueryTypes.RAW})
  .then(function(results) {
    DB.query(
      'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'organization` ( ' +
      '`organization_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT ' +
      ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ', {type: DB.QueryTypes.RAW})
      .then(function(results) {
        DB.query(
          'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user_organization` ( ' +
          '`user_id` int(11) UNSIGNED NOT NULL ' +
          ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ')
          .then(function(){
            DB.query(
              'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'content` ( ' +
              '`content_id` int(11) UNSIGNED NOT NULL ' +
              ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ', {type: DB.QueryTypes.RAW})
              .then(function(){
                // more queries
              }).catch(function(err){console.log(err);});
          }).catch(function(err){console.log(err);});
      }).catch(function(err){console.log(err);});
  }).catch(function(err){console.log(err);});
Ignore the fact that I'm creating tables with raw SQL instead of using Sequelize migration scripts; I'm just trying to illustrate the point that I have A LOT of MySQL queries that should run in series. If a query fails, then I need to stop the entire script and not let the subsequent .then() functions fire. In my Sequelize code, I achieved this by nesting a lot of raw query calls with then and catch handlers. This is going to be very difficult to troubleshoot if I have 100 of these nested callback statements.
Are there alternatives for me to consider beside nesting all these callback functions?
Sequelize uses (a modified version of) the bluebird promises library, which means that this should work:
var Promise = Sequelize.Promise;

Promise.each([
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user` ( ' +
    '`user_id` int(11) UNSIGNED NOT NULL' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8;',
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'organization` ( ' +
    '`organization_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ',
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user_organization` ( ' +
    '`user_id` int(11) UNSIGNED NOT NULL ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ',
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'content` ( ' +
    '`content_id` int(11) UNSIGNED NOT NULL ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; '
], function runQuery(query) {
  return DB.query(query, { type: DB.QueryTypes.RAW });
}).then(function() {
  console.log('all done');
}).catch(function(err) {
  console.log(err);
});
It uses the static version of .each(), which will iterate over the array items sequentially, pass each to the runQuery iterator (which returns a promise), and will stop when a promise is rejected.
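If you would rather not rely on Bluebird's static helpers, the same sequential, stop-on-first-failure behaviour can be sketched with plain Array.prototype.reduce, reusing the DB object and the same array of CREATE TABLE strings (a sketch only, not taken from the original answer):
// Chain the queries with reduce, so each query starts only after the
// previous one has resolved; a rejection falls through to the catch.
var queries = [
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user` ( ' +
    '`user_id` int(11) UNSIGNED NOT NULL' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8;'
  // ...the rest of the CREATE TABLE statements
];

queries.reduce(function (chain, query) {
  return chain.then(function () {
    return DB.query(query, { type: DB.QueryTypes.RAW });
  });
}, Promise.resolve())
.then(function () {
  console.log('all done');
})
.catch(function (err) {
  // a rejected query stops the chain here
  console.log(err);
});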
Did you not already answer your own question by not using migration scripts? By default, you'd want to run migration scripts to set up your database and have it logged, so you know when you migrated or when you last migrated.
If you need sequential SQL commands, you can still do that within one command; the queries will run sequentially anyway. If you want every single table to be a model, write migration scripts for that model instead of doing it like this.
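For reference, a minimal migration sketch (the file name is hypothetical, and the table simply mirrors the t_user table from the question):
// migrations/20160101000000-create-t-user.js (hypothetical file name)
'use strict';

module.exports = {
  up: function (queryInterface, Sequelize) {
    // runs when you execute `sequelize db:migrate`
    return queryInterface.createTable('t_user', {
      user_id: {
        type: Sequelize.INTEGER.UNSIGNED,
        allowNull: false
      }
    });
  },
  down: function (queryInterface, Sequelize) {
    // runs when you revert the migration
    return queryInterface.dropTable('t_user');
  }
};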
In order to avoid "promise hell" - which is just the same problem as "callback hell" - you can return each promise from the preceding .then() callback, so that everything stays in one flat chain:
DB.query(
  'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user` ( ' +
  '`user_id` int(11) UNSIGNED NOT NULL' +
  ') ENGINE=InnoDB DEFAULT CHARSET=utf8;', {type: DB.QueryTypes.RAW})
.then(function(results) {
  return DB.query(
    'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'organization` ( ' +
    '`organization_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ', {type: DB.QueryTypes.RAW});
}).then(function(results) {
  return DB.query(
    'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'user_organization` ( ' +
    '`user_id` int(11) UNSIGNED NOT NULL ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ');
}).then(function(){
  return DB.query(
    'CREATE TABLE IF NOT EXISTS `'+DB_PREFIX+'content` ( ' +
    '`content_id` int(11) UNSIGNED NOT NULL ' +
    ') ENGINE=InnoDB DEFAULT CHARSET=utf8; ', {type: DB.QueryTypes.RAW});
}).then(function(){
  // more queries
}).catch(function(err){console.log(err);});
The promise system allows chaining in this fashion, which removes the need for deep nesting and indentation. Note also that only one catch is required: if any promise in the chain is rejected, execution skips forward to the next available .catch().
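On newer Node versions (7.6+), the same flat, stop-on-first-failure sequence can also be sketched with async/await; this reuses the DB and DB_PREFIX from the question:
// A minimal async/await sketch: a rejected query throws and jumps
// straight to the catch block, so later queries never run.
async function createTables() {
  try {
    await DB.query(
      'CREATE TABLE IF NOT EXISTS `' + DB_PREFIX + 'user` ( ' +
      '`user_id` int(11) UNSIGNED NOT NULL' +
      ') ENGINE=InnoDB DEFAULT CHARSET=utf8;', { type: DB.QueryTypes.RAW });
    await DB.query(
      'CREATE TABLE IF NOT EXISTS `' + DB_PREFIX + 'organization` ( ' +
      '`organization_id` int(11) UNSIGNED NOT NULL AUTO_INCREMENT ' +
      ') ENGINE=InnoDB DEFAULT CHARSET=utf8;', { type: DB.QueryTypes.RAW });
    // ...more queries
  } catch (err) {
    console.log(err);
  }
}

createTables();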
Related
This is the raw query, which is working -
Insert into members (activity, emp_id, plan_id) VALUES ('admin', 21937, 2463) on conflict(emp_id, plan_id) where ("responsibility" != 'admin') DO Nothing;
I converted this query into a knex query, but the where clause is missing when I print the query built by knex.
let result = knex('members').withSchema('public').insert(insertParameters);
result = result
  .whereNotIn({
    'member.responsibility': 'admin',
  })
  .onConflict(['emp_id', 'plan_id'])
  .ignore();
result = await result.returning('id');
return result;
Query prepared by knex -
insert into "public"."member" ("plan_id", "responsibility", "emp_id") values ($1, $2, $3), ($4, $5, $6), ($7, $8, $9) on conflict ("emp_id", "plan_id") do nothing returning "id"
There is a partial index on the columns emp_id and plan_id with the condition responsibility != 'admin', so the where clause needs to be written as part of the conflict target.
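If the knex builder cannot express that partial-index predicate, one possible workaround (just a sketch, inside the same async function and reusing the table and values from the raw query above) is to send the whole statement through knex.raw:
// Sketch only: build the conditional ON CONFLICT by hand with knex.raw,
// since the builder output above drops the WHERE predicate.
const result = await knex.raw(
  "insert into public.members (activity, emp_id, plan_id) " +
  "values (?, ?, ?) " +
  "on conflict (emp_id, plan_id) where responsibility != 'admin' do nothing " +
  "returning id",
  ['admin', 21937, 2463]
);
return result.rows; // with the pg driver, knex.raw resolves to the pg result object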
My code is this:
db.run(`begin transaction;
    replace into players (user_id, role) values ($id, $role);
    update signed_up_users set finalised = 1 where user_id = $id;
    commit;`, { $id: id, $role: role }, err => {
  if (err) { throw err; }
});
And I get the following error:
Error: SQLITE_RANGE: bind or column index out of range
--> in Database#run('begin transaction;\n replace into players (user_id, role) values (?, ?);\n update signed_up_users set finalised = 1 where user_id = ?;\n commit;', [ '123', 'INNOCENT', '123' ], [Function])
at Object.exports.finalise_user (C:\whatever\file.js)
at user.resolve_to_id.then.id (C:\whatever\file.js:52:14)
at <anonymous>
at process._tickCallback (internal/process/next_tick.js:188:7)
If I understand the documentation correctly, I have the right number of parameters and placeholders in my statement. So what am I doing wrong? Thanks.
The documentation actually says:
Database#run(sql, [param, ...], [callback])
Runs the SQL query […]
Database#exec(sql, [callback])
Runs all SQL queries in the supplied string.
In other words, run() only prepares and executes the first statement in your string (begin transaction, which has no placeholders, hence the out-of-range bind error when your parameters are bound to it), while exec() runs every statement but does not accept bound parameters.
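A minimal sketch of one way around it, keeping the bound parameters: issue each statement with its own run() call inside db.serialize(), which keeps them in order on the same connection (rollback handling omitted for brevity):
db.serialize(() => {
  db.run('begin transaction;');
  db.run('replace into players (user_id, role) values ($id, $role);',
    { $id: id, $role: role },
    err => { if (err) { console.log(err); } });
  db.run('update signed_up_users set finalised = 1 where user_id = $id;',
    { $id: id },
    err => { if (err) { console.log(err); } });
  db.run('commit;');
});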
I'm running NodeJS and pg-promise, and am trying to accomplish something like:
db.none('INSERT INTO my-table (JSON-object-keys) VALUES ($1)', [JSON-object-values])
.catch(function(err) {
console.log('Error on insert into my-table: ' + err);
});
I have JSON-objects which can look like:
{"column1":"value1", "column2":"value2", "column3":"value3"}
{"column2":"value2", "column3":"value3"}
{"column1":"value1", "column3":"value3"}
I would like to have the INSERTS automatically generated corresponding to what the JSON-object contains.
Is that possible in an elegant way?
To explain a bit more: for the 3 JSON examples above, the following should be generated:
db.none('INSERT INTO my-table (column1, column2, column3) VALUES ($1, $2, $3)', [value1, value2, value3])
.catch(function(err) {
console.log('Error on insert into my-table: ' + err);
});
db.none('INSERT INTO my-table (column2, column3) VALUES ($1, $2)', [value2, value3])
.catch(function(err) {
console.log('Error on insert into my-table: ' + err);
});
db.none('INSERT INTO my-table (column1, column3) VALUES ($1, $2)', [value1, value3])
.catch(function(err) {
console.log('Error on insert into my-table: ' + err);
});
Your pgp object + the input object with all the properties:
var pgp = require('pg-promise')({
capSQL: true // capitalize all generated SQL
});
var inputObj = {
/* all your optional properties */
};
Define the raw-text type, using Custom Type Formatting:
var rawText = text => ({_rawType: true, toPostgres: () => text});
Create a generic default column, according to class Column:
var defCol = name => ({name, def: rawText('DEFAULT')});
// which is the same as:
var defCol = name => new pgp.helpers.Column({name, def: rawText('DEFAULT')});
Generate the list of default-able columns:
var cols = Object.keys(inputObj).map(defCol);
Create a ColumnSet with those columns:
var cs = new pgp.helpers.ColumnSet(cols, {table: 'my-table'});
When it is time to generate an insert query, you can do:
var insert = pgp.helpers.insert(inputObj, cs);
Recommended Approach
If you know the columns in advance, then you should just do the following:
var cs = new pgp.helpers.ColumnSet(
[defCol('column1'), defCol('column2'), defCol('column3')],
{table: 'my-table'});
A static cs object will always give much better performance.
This approach is also safer, because you do not need to verify that the object contains at least one property; if it doesn't, you'll get an error saying that it is impossible to generate an insert when there are no columns.
And this approach also works with multi-row inserts, which is very important. See also: Multi-row insert with pg-promise.
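Putting the pieces together, a minimal end-to-end sketch might look like this (the connection string is a placeholder, and db is the usual pg-promise database object):
var pgp = require('pg-promise')({ capSQL: true });
var db = pgp('postgres://user:password@localhost:5432/mydb'); // placeholder connection

var rawText = text => ({_rawType: true, toPostgres: () => text});
var defCol = name => ({name, def: rawText('DEFAULT')});

var cs = new pgp.helpers.ColumnSet(
  [defCol('column1'), defCol('column2'), defCol('column3')],
  {table: 'my-table'});

// any property that is missing falls back to the DEFAULT keyword,
// so the corresponding column must be nullable or have a default
var inputObj = {column2: 'value2', column3: 'value3'};

var insert = pgp.helpers.insert(inputObj, cs);

db.none(insert)
  .catch(function(err) {
    console.log('Error on insert into my-table: ' + err);
  });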
SELECT *
FROM (SELECT *, ROW_NUMBER() OVER ( ORDER BY No_ ) AS RowNum
FROM Item) DerivedTable
WHERE RowNum >= 501 AND RowNum <= 501 + ( 5 - 1 );
I think older SQL Server versions do not support OFFSET ... FETCH NEXT ... ROWS (the equivalent of LIMIT/OFFSET in MySQL), so the query above seems to be the only way to apply that logic.
How can sequelize implement the above query, which creates a virtual table "DerivedTable" with a column "RowNum" that is used in the WHERE clause?
Is there any other method to do this in sequelize, maybe including raw query or anything else?
It seems you are not alone with this issue. With SQL Server 2012, you can just use:
Model
  .findAndCountAll({
    where: {
      title: {
        $like: 'foo%'
      }
    },
    offset: 10,
    limit: 2
  })
  .then(function(result) {
    console.log(result.count);
    console.log(result.rows);
  });
However, since you are on an earlier version, it seems you are stuck with having to hand-write the query.
Something like this:
var theQuery = 'declare @rowsPerPage as bigint; '+
  'declare @pageNum as bigint; '+
  'set @rowsPerPage='+rowsPerPage+'; '+
  'set @pageNum='+page+'; '+
  'With SQLPaging As ( '+
  'Select Top(@rowsPerPage * @pageNum) ROW_NUMBER() OVER (ORDER BY ID asc) '+
  'as resultNum, * '+
  'FROM myTableName) '+
  'select * from SQLPaging with (nolock) where resultNum > ((@pageNum - 1) * @rowsPerPage);';
sequelize.query(theQuery)
  .spread(function(result) {
    console.log("Good old paginated results: ", result);
  });
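As a side note, the page values above are concatenated straight into the SQL string; sequelize.query also accepts a replacements option, so a slightly safer sketch of the same call (reusing rowsPerPage and page) could be:
// Sketch: same paging query, but with named replacements instead of
// concatenating page and rowsPerPage into the SQL string.
var theQuery = 'declare @rowsPerPage as bigint; '+
  'declare @pageNum as bigint; '+
  'set @rowsPerPage = :rowsPerPage; '+
  'set @pageNum = :page; '+
  'With SQLPaging As ( '+
  'Select Top(@rowsPerPage * @pageNum) ROW_NUMBER() OVER (ORDER BY ID asc) '+
  'as resultNum, * '+
  'FROM myTableName) '+
  'select * from SQLPaging with (nolock) where resultNum > ((@pageNum - 1) * @rowsPerPage);';
sequelize.query(theQuery, { replacements: { rowsPerPage: rowsPerPage, page: page } })
  .spread(function(results) {
    console.log('Good old paginated results: ', results);
  });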
I'm trying to run two parameterised insert queries using node-postgres: the first one specifies the primary key column, the second doesn't.
The second query, even though it doesn't specify the primary key column, fails saying there's a duplicate primary key.
My pg table:
CREATE TABLE teams (
  id serial PRIMARY KEY,
  created_by int REFERENCES users,
  name text,
  logo text
);
Code that reproduces this issue:
var pg = require('pg');
var insertWithId = 'INSERT INTO teams(id, name, created_by) VALUES($1, $2, $3) RETURNING id';
var insertWithoutId = 'INSERT INTO teams(name, created_by) VALUES($1, $2) RETURNING id';
pg.connect(process.env.POSTGRES_URI, function (err, client, releaseClient) {
  client.query(insertWithId, [1, 'First Team', 1], function (err, result) {
    releaseClient();
    if (err) {
      throw err;
    }
    console.log('first team created');
  });
});

pg.connect(process.env.POSTGRES_URI, function (err, client, releaseClient) {
  client.query(insertWithoutId, ['Second Team', 1], function (err, result) {
    releaseClient();
    if (err) {
      console.log(err);
    }
  });
});
And output of running this:
first team created
{ [error: duplicate key value violates unique constraint "teams_pkey"]
name: 'error',
length: 173,
severity: 'ERROR',
code: '23505',
detail: 'Key (id)=(1) already exists.',
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: 'public',
table: 'teams',
column: undefined,
dataType: undefined,
constraint: 'teams_pkey',
file: 'nbtinsert.c',
line: '406',
routine: '_bt_check_unique' }
From what I gather from reading the node-postgres source, parameterised queries are treated as prepared statements, which get cached if they reuse a name parameter; though from digging around its source, it doesn't seem to think that my queries have a name property.
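(For reference, node-postgres only caches a statement when the query is submitted with an explicit name, something like the hypothetical config object below; the plain (text, values) form used above has no name.)
// Hypothetical illustration of a *named* prepared statement in node-postgres;
// the statement name here is made up and not part of the original code.
client.query({
  name: 'insert-team',
  text: 'INSERT INTO teams(name, created_by) VALUES($1, $2) RETURNING id',
  values: ['Second Team', 1]
}, function (err, result) {
  // ...
});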
Does anyone have any ideas on how this could be avoided?
The first insert supplies a value for id, so the serial is not incremented; the sequence still stands at 1 after the first insert. The second insert does not supply a value for id, so the serial (= 1) is used, which is a duplicate. The best solution is to use only the second statement and let the application use the returned id, if needed.
In short: don't interfere with serials.
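A minimal sketch of that suggestion, reusing the second query from the question and reading the generated id back from the RETURNING clause:
// Sketch: let the serial generate the id and read it from result.rows.
pg.connect(process.env.POSTGRES_URI, function (err, client, releaseClient) {
  client.query(insertWithoutId, ['Second Team', 1], function (err, result) {
    releaseClient();
    if (err) {
      console.log(err);
      return;
    }
    console.log('created team with id', result.rows[0].id);
  });
});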
If you need to correct the next value for a sequence, you can use something like the below statement.
SELECT setval('teams_id_seq', (SELECT MAX(id) FROM teams));