Insert multiple values into a column of a table in PostgreSQL [duplicate] - node.js

A single row can be inserted like this:
client.query("insert into tableName (name, email) values ($1, $2) ", ['john', 'john#gmail.com'], callBack)
This approach automatically comments out any special characters.
How do i insert multiple rows at once?
I need to implement this:
"insert into tableName (name, email) values ('john', 'john#gmail.com'), ('jane', 'jane#gmail.com')"
I can just use js string operators to compile such rows manually, but then i need to add special characters escape somehow.

Use pg-format like below.
var format = require('pg-format');
var values = [
    [7, 'john22', 'john22@gmail.com', '9999999922'],
    [6, 'testvk', 'testvk@gmail.com', '88888888888']
];
client.query(format('INSERT INTO users (id, name, email, phone) VALUES %L', values), [], (err, result) => {
    console.log(err);
    console.log(result);
});

Another way, using PostgreSQL JSON functions:
client.query('INSERT INTO table (columns) ' +
    'SELECT m.* FROM json_populate_recordset(null::your_custom_type, $1) AS m',
    [JSON.stringify(your_json_object_array)], function (err, result) {
        if (err) {
            console.log(err);
        } else {
            console.log(result);
        }
    });

Following the article Performance Boost from the pg-promise library, here is its suggested approach:
// Concatenates an array of objects or arrays of values, according to the template,
// to use with insert queries. Can be used either as a class type or as a function.
//
// template = formatting template string
// data = array of either objects or arrays of values
function Inserts(template, data) {
    if (!(this instanceof Inserts)) {
        return new Inserts(template, data);
    }
    this.rawType = true;
    this.toPostgres = function () {
        return data.map(d => '(' + pgp.as.format(template, d) + ')').join(',');
    };
}
An example of using it, exactly as in your case:
var users = [['John', 23], ['Mike', 30], ['David', 18]];
db.none('INSERT INTO Users(name, age) VALUES $1', Inserts('$1, $2', users))
    .then(data => {
        // OK, all records have been inserted
    })
    .catch(error => {
        // Error, no records inserted
    });
And it will work with an array of objects as well:
var users = [{name: 'John', age: 23}, {name: 'Mike', age: 30}, {name: 'David', age: 18}];
db.none('INSERT INTO Users(name, age) VALUES $1', Inserts('${name}, ${age}', users))
    .then(data => {
        // OK, all records have been inserted
    })
    .catch(error => {
        // Error, no records inserted
    });
UPDATE-1
For a high-performance approach via a single INSERT query see Multi-row insert with pg-promise.
UPDATE-2
The information here is quite old now; see the latest syntax for Custom Type Formatting. What used to be _rawDBType is now rawType, and formatDBType was renamed to toPostgres.
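For reference, the multi-row insert that UPDATE-1 points to is built on the helpers namespace. A minimal sketch, assuming a users(name, age) table matching the examples above, and db being your pg-promise database object:
// ColumnSet describes the columns once and can be reused for every insert:
const pgp = require('pg-promise')({capSQL: true});
const cs = new pgp.helpers.ColumnSet(['name', 'age'], {table: 'users'});
const users = [{name: 'John', age: 23}, {name: 'Mike', age: 30}, {name: 'David', age: 18}];
// helpers.insert generates a single multi-row INSERT with all values escaped:
const query = pgp.helpers.insert(users, cs);
// => INSERT INTO "users"("name","age") VALUES('John',23),('Mike',30),('David',18)
db.none(query)
    .then(() => {
        // OK, all records have been inserted
    })
    .catch(error => {
        // Error, no records inserted
    });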

You are going to have to generate the query dynamically. Although possible, this is risky and could easily lead to SQL injection vulnerabilities if you do it wrong. It's also easy to end up with off-by-one errors between the indexes of the parameters in your query and the parameters you're passing in.
That being said, here is an example of how you could write this, assuming you have an array of users that looks like {name: string, email: string}. Note that node-postgres uses numbered $1, $2 placeholders, so they have to be generated with a running index:
client.query(
    `INSERT INTO table_name (name, email) VALUES ${users
        .map((_, i) => `($${i * 2 + 1}, $${i * 2 + 2})`)
        .join(',')}`,
    users.reduce((params, u) => params.concat([u.name, u.email]), []),
    callBack,
)
An alternative approach is to use a library like @databases/pg (which I wrote):
await db.query(sql`
INSERT INTO table_name (name, email)
VALUES ${sql.join(users.map(u => sql`(${u.name}, ${u.email})`), ',')}
`)
@databases requires the query to be tagged with sql and uses that to ensure any user data you pass is always automatically escaped. This also lets you write the parameters inline, which I think makes the code much more readable.
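For context, a setup sketch based on the library's documented usage; the connection string is a placeholder:
// @databases/pg exports a pool factory as the default export, plus the sql tag:
import createConnectionPool, {sql} from '@databases/pg';

const db = createConnectionPool('postgres://user:pass@localhost:5432/mydb');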

Using the npm module postgres (porsager/postgres), which has tagged template strings at its core:
https://github.com/porsager/postgres#multiple-inserts-in-one-query
const users = [{
    name: 'Murray',
    age: 68,
    garbage: 'ignore'
}, {
    name: 'Walter',
    age: 80,
    garbage: 'ignore'
}]
sql`insert into users ${ sql(users, 'name', 'age') }`
// Is translated to:
insert into users ("name", "age") values ($1, $2), ($3, $4)
// Here you can also omit column names which will use all object keys as columns
sql`insert into users ${ sql(users) }`
// Which results in:
insert into users ("name", "age", "garbage") values ($1, $2, $3), ($4, $5, $6)
Just thought I'd post this since it's brand new out of beta, and I've found it to have a better philosophy as a SQL library. IMHO it would be preferable to the other postgres/node libraries posted in the other answers.
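For context, the sql tag above comes from initializing the client; a minimal setup sketch (the connection string is a placeholder):
// The postgres package's default export is a factory that returns the sql tag:
const postgres = require('postgres')

const sql = postgres('postgres://user:pass@localhost:5432/mydb')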

I know I am late to the party, but what worked for me was a simple map. I hope this will help someone looking for the same thing. Note that this interpolates values directly into the SQL string, so it is only safe with trusted input; for user data, prefer one of the parameterized approaches above.
let sampleQuery = array.map(myRow =>
    `('${myRow.column_a}','${myRow.column_b}')`
)
let res = await pool.query(`INSERT INTO public.table(column_a, column_b) VALUES ${sampleQuery}`)
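A safer variant of the same idea, sketched with pg-format (shown in the accepted answer above), which escapes the literals for you; rows shaped like {column_a, column_b} are assumed:
// pg-format's %L expands a nested array into escaped row literals:
const format = require('pg-format');
const rows = array.map(r => [r.column_a, r.column_b]);
let res = await pool.query(
    format('INSERT INTO public.table (column_a, column_b) VALUES %L', rows)
);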

client.query("insert into tableName (name, email) values ($1, $2),($3, $4) ", ['john', 'john#gmail.com','john', 'john#gmail.com'], callBack)
doesn't help?
Futher more, you can manually generate a string for query:
insert into tableName (name, email) values (" +var1 + "," + var2 + "),(" +var3 + ", " +var4+ ") "
if you read here, https://github.com/brianc/node-postgres/issues/530 , you can see the same implementation.

Related

Insert multiple rows at once in node postgres [duplicate]


Javascript promise to iterate/include dynamic number of Arguments

I'm using the mssql npm module (with the Tedious driver) to read/write to an Azure SQL database from my Node server. https://www.npmjs.com/package/mssql
All the examples I've found provide a hardcoded example of the query, whether reading or writing records, like this:
var insertRecordIntoTable = function (callback) {
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            .input('ID', sql.Int, 210)
            .input('Name', sql.NVarChar, "John Doe")
            .input('EmailAddress', sql.NVarChar, "test@test.com")
            .query("INSERT INTO Accounts (ID, Name, EmailAddress) VALUES (@ID, @Name, @EmailAddress)")
    }).then(result => {
        console.dir(result)
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
Obviously, I'd like to write one standard method to write records to any table in the database.
Now I can fetch the structure of each table and use that to find what datatype each field should be for each key in jsonRecord, and write something like this:
var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
    let arrKeys = jsonRecord.allKeys();
    let columnNames = getCommaSeparatedColumnNames(arrKeys);
    let valuePlaceholders = getValuePlaceholdersForSql(arrKeys);
    sql.connect(dbConfig).then(pool => {
        return pool.request()
            // How do I write something like this, so that a dynamic number of fields
            // and values get populated in the query inside this promise?
            // I'm open to methods without promises as well.
            for (let x = 0; x < arrKeys.length; x++) {
                let key = arrKeys[x];
                // .input('ID', sql.Int, 210)
                .input(key, getTypeForKey(key, tableName), jsonRecord[key])
            }
            .query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")")
    }).then(result => {
        console.dir(result)
        callback(result);
    }).catch(err => {
        // ... error checks
        console.log("Error occurred: " + err);
        callback(err);
    });
}
function getTypeForKey(key, tableName) { /* looks up the table schema and returns the key's sql type */ }
function getCommaSeparatedColumnNames(arrKeys) { return arrKeys.join(", "); }
function getValuePlaceholdersForSql(arrKeys) { return arrKeys.map(key => "@" + key).join(", "); }
I'm sure writing from Node.js to SQL is fairly common functionality, and there may be better ways to achieve what I'm trying to do here. Please feel free to go a different route.
P.S. Although I should say that I prefer mssql over the Tedious package. It just seems better in functionality after going through the documentation over the last several hours.
If you want to interact with your database without creating all the queries by yourself, you can use a query builder like knex to manage the data as objects:
knex('Accounts').insert({ID: 210, Name: "John Doe", EmailAddress: "test@test.com"})
Would be similar to:
insert into `Accounts` (`EmailAddress`, `ID`, `Name`) values ('test@test.com', 210, 'John Doe')
Also I see you are checking types. If you need validation, maybe a complete ORM (I like Objection.js) would be a good choice.
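For completeness, the loop from the question is achievable with mssql directly, because request.input() can be called imperatively before query(). A sketch reusing the question's helper functions (with Object.keys standing in for the question's jsonRecord.allKeys()):
var insertRecordIntoTable = function (jsonRecord, tableName, callback) {
    let arrKeys = Object.keys(jsonRecord);
    let columnNames = getCommaSeparatedColumnNames(arrKeys);
    let valuePlaceholders = getValuePlaceholdersForSql(arrKeys);
    sql.connect(dbConfig).then(pool => {
        // Build the request imperatively: each .input() registers one parameter.
        const request = pool.request();
        for (let key of arrKeys) {
            request.input(key, getTypeForKey(key, tableName), jsonRecord[key]);
        }
        return request.query("INSERT INTO " + tableName + " (" + columnNames + ") VALUES (" + valuePlaceholders + ")");
    }).then(result => {
        callback(result);
    }).catch(err => {
        console.log("Error occurred: " + err);
        callback(err);
    });
}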

How to update a key of object with value in json column with knexjs?

I'm trying to update a column in the users table; the column type is json.
The column name is test,
and the column's default value is an object, for example:
{a: "text", b: 0}
How do I update, let's say, the object key b without changing the whole column?
The code I'm using is:
knexDb('users').where({
    email: email
})
.update({
    test: { b: 1 }
})
Second solution:
knexDb('users').where({
    email: email
})
.update({
    test: knexDb.raw(`jsonb_set(??, '{b}', ?)`, ['test', 1])
})
The first solution changes the whole column cell, and test becomes only { b: 1 }.
The second solution doesn't work; it gives an error:
function jsonb_set(json, unknown, unknown) does not exist
The expected result is to update only a certain key's value in an object, without changing the whole object.
P.S.
I also want to update an array that consists of objects like the above, for example:
[{a: "text", b: 0}, {c: "another-text", d: 0}]
If I use the code above in knexjs, it'll update the whole array to only {b: 1}.
P.S. After searching a lot, I found that in order to make the above jsonb_set() work, I need to set the column type to jsonb.
But now I'm facing another issue:
how to update multiple keys using jsonb_set?
knexDb('users').where({
    email: email
})
.update({
    test: knexDb.raw(`jsonb_set(??, '{b}', ?)`, ['test', 1]),
    test: knexDb.raw(`jsonb_set(??, '{a}', ?)`, ['test', "another-text"]),
})
The first query (key b) is now not updating; in fact, none of the updates work except the last one (key a). Can someone explain why?
Your issue is that you're overwriting test: what you pass into update is a JS object, and a JS object cannot have duplicate keys, so the last test property wins. You'll have to nest the jsonb_set calls so that a single raw expression updates both keys (note that jsonb_set expects a jsonb value as its third argument, hence the casts):
knexDb('users').where({
    email: email
})
.update({
    test: knexDb.raw(
        `jsonb_set(jsonb_set(??, '{a}', ?::jsonb), '{b}', ?::jsonb)`,
        ['test', JSON.stringify('another-text'), JSON.stringify(1)]
    )
})
Probably a better option exists. One that would be much more readable, if you have to do this for several columns, is something like what I have included below; in this example, the column containing the jsonb is called json.
const updateUser = async (email, a, b) => {
    const user = await knexDb('users')
        .where({ email })
        .first();
    user.json.a = a;
    user.json.b = b;
    const updatedUser = await knexDb('users')
        .where({ email })
        .update(user)
        .returning('*');
    return updatedUser;
}
Update/insert a single field in a JSON column:
knex('table')
    .update({
        your_json_col: knex.jsonSet('your_json_col', '$.field', 'new value')
    })
    .where(...)
Update/insert multiple fields
Option 1 (nested):
knex('table')
    .update({
        your_json_col: knex.jsonSet(knex.jsonSet('your_json_col', '$.field1', 'val1'),
            '$.field2', 'val2')
    })
    .where(...)
Option 2 (chained):
knex('table')
    .update({
        your_json_col: knex.jsonSet('your_json_col', '$.field1', 'val1')
    })
    .update({
        your_json_col: knex.jsonSet('your_json_col', '$.field2', 'val2')
    })
    .where(...)

SQL Server : lower camelcase for each result

I am creating an API with SQL Server as the database. My tables and columns use Pascal case (CountryId, IsDeleted, etc.), and that cannot be changed.
So when I do this:
const mssql = require('mssql');
var sqlstr = 'select * from Country where CountryId = @countryId';
var db = await koaApp.getDb();
let result = await db.request()
    .input('countryId', mssql.Int, countryId)
    .query(sqlstr);
My resulting object is
{
    CountryId: 1,
    CountryName: "Germany"
}
But I want it to be
{
    countryId: 1,
    countryName: "Germany"
}
I know there is a "row" event, but I wanted something more performant (since I may be returning several rows from the query, above is just an example).
Any suggestions?
PS: I want to avoid the FOR JSON syntax
Posting this as an actual answer, as it proved helpful to the OP: if it's viable, you may try simply specifying the columns in the query, as such:
select
    CountryID countryId,
    CountryName countryName
from
    Country
where
    CountryId = @countryId
Typically it's not best practice to use select * in queries anyway, for performance reasons.
A simple explanation: putting a space and a new name after each column name (or, perhaps better practice, putting the new name in square brackets, such as CountryName [countryName], which allows characters such as spaces to be included in the new names) aliases the column with a name of your choosing when it is returned from SQL.
I'd suggest using the lodash utility library to convert the column names; there is a _.camelCase function for this (see the camelCase documentation):
_.camelCase('Foo Bar');
// => 'fooBar'
_.camelCase('--foo-bar--');
// => 'fooBar'
_.camelCase('__FOO_BAR__');
// => 'fooBar'
You can enumerate the result keys using Object.entries then do a reduce, e.g.
let result = {
    CountryId: 1,
    CountryName: "Germany"
};
let resultCamelCase = Object.entries(result).reduce((obj, [key, value]) => {
    obj[_.camelCase(key)] = value;
    return obj;
}, {});
console.log(resultCamelCase);
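To apply the same conversion to every row of a query result (mssql returns the rows in result.recordset), the reducer can simply be mapped over the rows; a small sketch:
// Convert the column names of all returned rows to camelCase:
const rows = result.recordset.map(row =>
    Object.entries(row).reduce((obj, [key, value]) => {
        obj[_.camelCase(key)] = value;
        return obj;
    }, {})
);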

NodeJS and pg-promise, insert dynamically from JSON-object

I'm running NodeJS and pg-promise, and am trying to accomplish something like:
db.none('INSERT INTO my-table (JSON-object-keys) VALUES ($1)', [JSON-object-values])
    .catch(function(err) {
        console.log('Error on insert into my-table: ' + err);
    });
I have JSON-objects which can look like:
{"column1":"value1", "column2":"value2", "column3":"value3"}
{"column2":"value2", "column3":"value3"}
{"column1":"value1", "column3":"value3"}
I would like to have the INSERTS automatically generated corresponding to what the JSON-object contains.
Is that possible in an elegant way?
Explained a bit more: for the 3 JSON examples above, the following should be generated:
db.none('INSERT INTO my-table (column1, column2, column3) VALUES ($1, $2, $3)', [value1, value2, value3])
    .catch(function(err) {
        console.log('Error on insert into my-table: ' + err);
    });
db.none('INSERT INTO my-table (column2, column3) VALUES ($1, $2)', [value2, value3])
    .catch(function(err) {
        console.log('Error on insert into my-table: ' + err);
    });
db.none('INSERT INTO my-table (column1, column3) VALUES ($1, $2)', [value1, value3])
    .catch(function(err) {
        console.log('Error on insert into my-table: ' + err);
    });
Your pgp object + the input object with all the properties:
var pgp = require('pg-promise')({
    capSQL: true // capitalize all generated SQL
});
var inputObj = {
/* all your optional properties */
};
Define the raw-text type, using Custom Type Formatting:
var rawText = text => ({rawType: true, toPostgres: () => text});
Create a generic default column, according to class Column:
var defCol = name => ({name, def: rawText('DEFAULT')});
// which is the same as:
var defCol = name => new pgp.helpers.Column({name, def: rawText('DEFAULT')});
Generate the list of default-able columns:
var cols = Object.keys(inputObj).map(defCol);
Create a ColumnSet with those columns:
var cs = new pgp.helpers.ColumnSet(cols, {table: 'my-table'});
When it is time to generate an insert query, you can do:
var insert = pgp.helpers.insert(inputObj, cs);
Recommended Approach
If you know the columns in advance, then you should just do the following:
var cs = new pgp.helpers.ColumnSet(
[defCol('column1'), defCol('column2'), defCol('column3')],
{table: 'my-table'});
A static cs object will always provide a much better performance.
This approach is also safer, because you do not need to verify whether there is at least one property in the object; if there isn't, you'll get an error saying that it is impossible to generate an insert when there are no columns.
And this approach also works with multi-row inserts, which is very important. See also: Multi-row insert with pg-promise.
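Putting the pieces together, generating an insert for a partial object then looks like this (a sketch; the generated SQL shown is approximate):
// Properties missing from the object fall back to DEFAULT via defCol:
var insert = pgp.helpers.insert({column1: 'value1', column3: 'value3'}, cs);
// => approximately:
// INSERT INTO "my-table"("column1","column2","column3") VALUES('value1',DEFAULT,'value3')
db.none(insert)
    .catch(function (err) {
        console.log('Error on insert into my-table: ' + err);
    });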
