NodeJS and pg-promise, insert dynamically from JSON-object - node.js

I'm running NodeJS with pg-promise, and I'm trying to accomplish something like:
db.none('INSERT INTO my-table (JSON-object-keys) VALUES ($1)', [JSON-object-values])
    .catch(function (err) {
        console.log('Error on insert into my-table: ' + err);
    });
I have JSON objects which can look like:
{"column1":"value1", "column2":"value2", "column3":"value3"}
{"column2":"value2", "column3":"value3"}
{"column1":"value1", "column3":"value3"}
I would like the INSERTs to be generated automatically, corresponding to what each JSON object contains.
Is that possible in an elegant way?
To explain a bit more: for the 3 JSON examples above, the following should be generated:
db.none('INSERT INTO my-table (column1, column2, column3) VALUES ($1, $2, $3)', [value1, value2, value3])
    .catch(function (err) {
        console.log('Error on insert into my-table: ' + err);
    });

db.none('INSERT INTO my-table (column2, column3) VALUES ($1, $2)', [value2, value3])
    .catch(function (err) {
        console.log('Error on insert into my-table: ' + err);
    });

db.none('INSERT INTO my-table (column1, column3) VALUES ($1, $2)', [value1, value3])
    .catch(function (err) {
        console.log('Error on insert into my-table: ' + err);
    });

Your pgp object + the input object with all the properties:
var pgp = require('pg-promise')({
    capSQL: true // capitalize all generated SQL
});

var inputObj = {
    /* all your optional properties */
};
Define the raw-text type, using Custom Type Formatting:
var rawText = text => ({_rawType: true, toPostgres: () => text});
Create a generic default column, according to class Column:
var defCol = name => ({name, def: rawText('DEFAULT')});
// which is the same as:
var defCol = name => new pgp.helpers.Column({name, def: rawText('DEFAULT')});
Generate the list of default-able columns:
var cols = Object.keys(inputObj).map(defCol);
Create a ColumnSet with those columns:
var cs = new pgp.helpers.ColumnSet(cols, {table: 'my-table'});
When it is time to generate an insert query, you can do:
var insert = pgp.helpers.insert(inputObj, cs);
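And then execute it; a minimal sketch, assuming db is a database object created from the same pgp instance:

// sketch: executing the generated query; assumes var db = pgp(connection)
db.none(insert)
    .then(() => {
        // success, the row was inserted
    })
    .catch(err => {
        console.log('Error on insert into my-table: ' + err);
    });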
Recommended Approach
If you know the columns in advance, then you should just do the following:
var cs = new pgp.helpers.ColumnSet(
[defCol('column1'), defCol('column2'), defCol('column3')],
{table: 'my-table'});
A static cs object will always give much better performance.
This approach is also safer, because you do not need to verify that the object has at least one property; with the dynamic approach, an empty object produces an error saying that it is impossible to generate an insert when there are no columns.
And this approach also works with multi-row inserts, which is very important. See also: Multi-row insert with pg-promise.
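For example, a minimal sketch of a multi-row insert reusing the same static cs (the data values are placeholders):

// rows may omit any of the columns, thanks to def: DEFAULT
var rows = [
    {column1: 'value1', column2: 'value2', column3: 'value3'},
    {column2: 'value2', column3: 'value3'}
];
db.none(pgp.helpers.insert(rows, cs))
    .catch(err => {
        console.log('Error on multi-row insert into my-table: ' + err);
    });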

Related

Insert multiple values into a column of a table in PostgreSQL [duplicate]

A single row can be inserted like this:
client.query("insert into tableName (name, email) values ($1, $2) ", ['john', 'john#gmail.com'], callBack)
This approach automatically comments out any special characters.
How do i insert multiple rows at once?
I need to implement this:
"insert into tableName (name, email) values ('john', 'john#gmail.com'), ('jane', 'jane#gmail.com')"
I can just use js string operators to compile such rows manually, but then i need to add special characters escape somehow.
Use pg-format like below.
var format = require('pg-format');

var values = [
    [7, 'john22', 'john22@gmail.com', '9999999922'],
    [6, 'testvk', 'testvk@gmail.com', '88888888888']
];

client.query(format('INSERT INTO users (id, name, email, phone) VALUES %L', values), [], (err, result) => {
    console.log(err);
    console.log(result);
});
One other way using PostgreSQL json functions:
client.query('INSERT INTO table (columns) ' +
    'SELECT m.* FROM json_populate_recordset(null::your_custom_type, $1) AS m',
    [JSON.stringify(your_json_object_array)], function (err, result) {
        if (err) {
            console.log(err);
        } else {
            console.log(result);
        }
    });
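For this to work, your_custom_type must already exist in the database. A hypothetical sketch of creating a matching composite type (the column list here is an assumption, not part of the original answer):

// sketch: a composite type whose fields match the JSON objects being inserted
client.query(
    'CREATE TYPE your_custom_type AS (id int, name text, email text, phone text)',
    function (err) {
        if (err) console.log(err); // e.g. the type already exists
    });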
Following this article: Performance Boost from the pg-promise library, and its suggested approach:
// Concatenates an array of objects or arrays of values, according to the template,
// to use with insert queries. Can be used either as a class type or as a function.
//
// template = formatting template string
// data = array of either objects or arrays of values
function Inserts(template, data) {
    if (!(this instanceof Inserts)) {
        return new Inserts(template, data);
    }
    this.rawType = true;
    this.toPostgres = function () {
        return data.map(d => '(' + pgp.as.format(template, d) + ')').join(',');
    };
}
An example of using it, exactly as in your case:
var users = [['John', 23], ['Mike', 30], ['David', 18]];

db.none('INSERT INTO Users(name, age) VALUES $1', Inserts('$1, $2', users))
    .then(data => {
        // OK, all records have been inserted
    })
    .catch(error => {
        // Error, no records inserted
    });
And it will work with an array of objects as well:
var users = [{name: 'John', age: 23}, {name: 'Mike', age: 30}, {name: 'David', age: 18}];

db.none('INSERT INTO Users(name, age) VALUES $1', Inserts('${name}, ${age}', users))
    .then(data => {
        // OK, all records have been inserted
    })
    .catch(error => {
        // Error, no records inserted
    });
UPDATE-1
For a high-performance approach via a single INSERT query see Multi-row insert with pg-promise.
UPDATE-2
The information here is quite old now; see the latest syntax for Custom Type Formatting. What used to be _rawDBType is now rawType, and formatDBType was renamed to toPostgres.
You are going to have to generate the query dynamically. Although possible, this is risky and could easily lead to SQL injection vulnerabilities if you do it wrong. It's also easy to end up with off-by-one errors between the index of your parameters in the query and the parameters you're passing in.
That being said, here is an example of how you could write this, assuming you have an array of users that looks like {name: string, email: string}:
client.query(
    `INSERT INTO table_name (name, email) VALUES ${users
        .map((_, i) => `($${i * 2 + 1}, $${i * 2 + 2})`) // pg uses $1, $2, ... placeholders
        .join(',')}`,
    users.reduce((params, u) => params.concat([u.name, u.email]), []),
    callBack,
)
An alternative approach is to use a library like @databases/pg (which I wrote):
await db.query(sql`
INSERT INTO table_name (name, email)
VALUES ${sql.join(users.map(u => sql`(${u.name}, ${u.email})`), ',')}
`)
@databases requires the query to be tagged with sql and uses that to ensure any user data you pass is always automatically escaped. This also lets you write the parameters inline, which I think makes the code much more readable.
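For context, a sketch of the setup the snippet above assumes (the connection string is a placeholder):

// sketch: creating the pool and the sql tag used above
import createConnectionPool, {sql} from '@databases/pg';

const db = createConnectionPool('postgres://user:password@localhost/mydb');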
Using the npm module postgres (porsager/postgres), which has tagged template strings at its core:
https://github.com/porsager/postgres#multiple-inserts-in-one-query
const users = [{
    name: 'Murray',
    age: 68,
    garbage: 'ignore'
}, {
    name: 'Walter',
    age: 80,
    garbage: 'ignore'
}]

sql`insert into users ${ sql(users, 'name', 'age') }`

// Is translated to:
insert into users ("name", "age") values ($1, $2), ($3, $4)

// Here you can also omit column names, which will use all object keys as columns
sql`insert into users ${ sql(users) }`

// Which results in:
insert into users ("name", "age", "garbage") values ($1, $2, $3), ($4, $5, $6)
Just thought I'd post this, since it's brand new out of beta, and I've found it to have a better philosophy as a SQL library. IMHO it would be preferable over the other postgres/node libraries posted in the other answers.
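For context, a sketch of the setup those snippets assume (connection details are placeholders):

const postgres = require('postgres');

// the sql tag doubles as the row/column helper used above
const sql = postgres('postgres://user:password@localhost:5432/mydb');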
I know I am late to the party, but what worked for me was a simple map.
I hope this helps someone looking for the same thing.
let sampleQuery = array.map(myRow =>
    `('${myRow.column_a}','${myRow.column_b}')`
);

let res = await pool.query(`INSERT INTO public.table(column_a, column_b) VALUES ${sampleQuery}`);
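Note that interpolating values directly like this is open to SQL injection if they come from user input. A sketch of the same map idea using numbered placeholders instead, assuming the same array and pool:

// generate ($1, $2), ($3, $4), ... and a flat parameter list
const placeholders = array.map(
    (_, i) => `($${i * 2 + 1}, $${i * 2 + 2})`
).join(', ');
const params = array.flatMap(myRow => [myRow.column_a, myRow.column_b]);
let res = await pool.query(
    `INSERT INTO public.table(column_a, column_b) VALUES ${placeholders}`,
    params
);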
client.query("insert into tableName (name, email) values ($1, $2),($3, $4) ", ['john', 'john#gmail.com','john', 'john#gmail.com'], callBack)
doesn't help?
Futher more, you can manually generate a string for query:
insert into tableName (name, email) values (" +var1 + "," + var2 + "),(" +var3 + ", " +var4+ ") "
if you read here, https://github.com/brianc/node-postgres/issues/530 , you can see the same implementation.

Javascript promise to iterate/include dynamic number of Arguments

I'm using the mssql npm module (with the Tedious driver) to read/write to an Azure SQL database from my Node server: https://www.npmjs.com/package/mssql
All the examples I've found provide a hardcoded example of the query, whether reading or writing records, like this:
var insertRecordIntoTable = function (callback) {
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            .input('ID', sql.Int, 210)
            .input('Name', sql.NVarChar, "John Doe")
            .input('EmailAddress', sql.NVarChar, "test@test.com")
            .query("INSERT INTO Accounts (ID, Name, EmailAddress) VALUES (@ID, @Name, @EmailAddress)")
    }).then(result => {
        console.dir(result);
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
Obviously, I'd like to write one standard method to write records to any table in the database.
Now, I can fetch the structure of each table and use it to determine what datatype each field should be, based on the keys in jsonRecord, and write something like this:
var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
    let arrKeys = jsonRecord.allKeys();
    let columnNames = getCommaSeparatedColumnNames(arrKeys);
    let valuePlaceholders = getValuePlaceholdersForSql(arrKeys);
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            // How do I write something like this, so that a dynamic number of fields
            // and values get populated in the query inside this promise?
            // I'm open to methods without promises as well.
            for (let x = 0; x < arrKeys.length; x++) {
                let key = arrKeys[x];
                // .input('ID', sql.Int, 210)
                .input(key, getTypeForKey(key, tableName), jsonRecord[key])
            }
            .query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")")
    }).then(result => {
        console.dir(result);
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
function getTypeForKey(key, tableName) { /* looks up the table schema and returns the key's type */ }
function getCommaSeparatedColumnNames(arrKeys) { return arrKeys.join(", "); }
function getValuePlaceholdersForSql(arrKeys) { /* prepend '@' to every key, then join with commas and return that string */ }
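For reference, a minimal sketch of how that loop could be expressed with the helpers above: .input() returns the request, so it can be called in a plain loop before .query():

// sketch: build the request incrementally instead of one hardcoded chain
sql.connect(dbConfig).then(pool => {
    let request = pool.request();
    for (let key of arrKeys) {
        request = request.input(key, getTypeForKey(key, tableName), jsonRecord[key]);
    }
    return request.query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")");
})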
I'm sure writing to SQL from node.js is fairly common functionality, and there may be better ways to achieve what I'm trying to do here. Please feel free to go a different route.
P.S. Although I should say that I prefer the mssql package over Tedious. It just seems better in functionality, after going through the documentation over the last several hours.
If you want to interact with your database without creating all the queries by yourself, you can use a query builder like knex to manage the data as objects:
knex('Accounts').insert({ID: 210, Name: "John Doe", EmailAddress: "test@test.com"})
Would be similar to:
insert into `Accounts` (`EmailAddress`, `ID`, `Name`) values ('test@test.com', 210, 'John Doe')
Also, I see you are checking types. If you need validation, maybe a complete ORM (I like Objection.js) would be a good choice.
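For example, a hypothetical sketch with Objection.js, where a model's JSON schema gives you validation on insert (all names here are assumptions):

const { Model } = require('objection');

// the model must first be bound to a knex instance: Model.knex(knex)
class Account extends Model {
    static get tableName() { return 'Accounts'; }
    static get jsonSchema() {
        return {
            type: 'object',
            required: ['ID', 'Name'],
            properties: {
                ID: {type: 'integer'},
                Name: {type: 'string'},
                EmailAddress: {type: 'string'}
            }
        };
    }
}

// the insert is rejected with a validation error if the object doesn't match the schema
await Account.query().insert({ID: 210, Name: 'John Doe', EmailAddress: 'test@test.com'});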

SQLite - Update field if new value isn't empty

I'm making a little web interface with nodejs, express and sqlite3. There's a table with a fixed number of rows and columns, which can be updated through a form. The user isn't required to fill out all of the fields, so there are a lot of empty values in the post request. I only want to write the populated fields back to the database and ignore the empty ones. I can think of a number of ugly and tedious ways to achieve this, but I'm sure there's some really simple solution I don't know of.
app.post('/updatevals', (req, res) => {
    for (const m in req.body) {
        const val = req.body[m]; // val = ['value1', '', '', 5]
        // user left index 1 + 2 empty
        // last value is the primary key
        // Overwrites val2 + val3 with empty strings...
        const sql = "UPDATE mytable SET val1=?, val2=?, val3=? WHERE id=?";
        db.run(sql, val, (err) => {});
    }
});
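One simple approach, sketched here under the assumption that empty fields arrive as empty strings: let SQLite itself keep the old value, using NULLIF and COALESCE:

app.post('/updatevals', (req, res) => {
    for (const m in req.body) {
        const val = req.body[m]; // e.g. ['value1', '', '', 5]
        // NULLIF turns '' into NULL, and COALESCE then falls back to the
        // current column value, so empty fields leave the row untouched
        const sql = `UPDATE mytable SET
            val1 = COALESCE(NULLIF(?, ''), val1),
            val2 = COALESCE(NULLIF(?, ''), val2),
            val3 = COALESCE(NULLIF(?, ''), val3)
            WHERE id = ?`;
        db.run(sql, val, (err) => {});
    }
});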

node-postgres prepared statement - sql injection

I am new to node-postgres and am unable to resolve this error when I try to ensure no SQL injection is possible with my prepared statement.
Here is a snippet of the code
// the prepared statement
var preparedstatement = client.query({
    text: "select ST_AsText(ST_Transform(geodata,4326)) from table_name where ST_Contains(ST_GeomFromText($1,4326),table_name.geodata)",
    values: ["POINT(Lat Long)"],
    name: 'where'
});
// the query
var queryresult = client.query({name: 'where', values: [msg]},["'; DROP TABLE user;"], function(err) {
if (err) {
socket.emit('query error', String(err));
}
});
Whenever I enter the geodata (as a message from the client using socket.io), socket.emit returns an error saying
Invalid geometry
However, the code works fine when I remove ["'; DROP TABLE user;"] from the code, i.e.
// the query
var queryresult = client.query({name: 'where', values: [msg]}, function (err) {
    if (err) {
        socket.emit('query error', String(err));
    }
});
The above works perfectly. Any help in understanding what I am doing wrong here would be great.
var preparedstatement = client.query({
    text: "select ST_AsText(ST_Transform(geodata,4326)) from table_name where ST_Contains(ST_GeomFromText($1,4326),table_name.geodata)",
    values: ["POINT(Lat Long)"],
    name: 'where'
});
results in SQL something like:
prepare "where" as
select ST_AsText(ST_Transform(geodata,4326))
from table_name
where ST_Contains(ST_GeomFromText($1,4326),table_name.geodata);
execute "where" (POINT(Lat Long));
which could probably work, if Lat and Long were attributes of table_name
next:
var queryresult = client.query({name: 'where', values: [msg]}, function (err) {
    if (err) {
        socket.emit('query error', String(err));
    }
});
does:
execute "where" (msg_value);
which probably works, if the value is of a compatible data type
and finally:
var queryresult = client.query({name: 'where', values: [msg]}, ["'; DROP TABLE user;"], function (err) {
    if (err) {
        socket.emit('query error', String(err));
    }
});
runs SQL:
execute "where" ('''; DROP TABLE user;');
which gives an error, as this text is not a valid geometry.
Notice here that client.query(text QUERY, array VALUES) is being used as client.query(object QUERY, array VALUES), and the separate VALUES argument overrides the values in the QUERY object; that is why your [msg] was "ignored".
NB
Checking whether prepared statements are prone to such SQL injection is pointless, as this feature was implemented precisely to be safe against such injections. For example, even if you used the data type text (to avoid a type mismatch) and tried to inject a semicolon and a drop statement, the prepared statement would treat the injection as a literal value and thus be safe, e.g.:
var preparedstatement = client.query({
    text: "select $1::text resulting_att",
    values: ['some default'],
    name: 'ps_name'
});
var queryresult = client.query({name: 'ps_name'}, ["'; DROP TABLE user;"], function (err, res) {
    console.log(err, res.rows);
    client.end();
});
logs:
null [ anonymous { resulting_att: '\'; DROP TABLE user;' } ]
and does not try to drop anything.
