Bind message supplies 1 parameters, but prepared statement "" requires 2 - node.js

I have a table "goods" with two columns: id (jsonb, primary key) and name.
Using this query:
const query = 'INSERT INTO "goods" (id, name) VALUES ($1, $2)'
together with the following data:
const data = {id: 1, name: "milk"};
gives me the following error:
{ [error: bind message supplies 1 parameters, but prepared statement "" requires 2]
name: 'error',
length: 130,
severity: 'ERROR',
code: '08P01',
detail: undefined,
hint: undefined,
position: undefined,
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'postgres.c',
line: '1556',
routine: 'exec_bind_message' }
I have a postgres database set up, connected via pg.Pool() and executing javascript to insert my data.
Edit:
This is how I prepare my query:
pool.query(query, [data]).then(() => {
    console.log("ok")
})
.catch(error => {
    console.log(error)
});
Edit2:
Using the following:
const query = 'INSERT INTO "goods" (id, name) VALUES ($1, $2)'
const data = JSON.stringify([1, "milk"]);
pool.query(query, data).then(() => {
    console.log("ok")
})
.catch(error => {
    console.log(error)
});
Just spits out the following error: [TypeError: self.values.map is not a function]

As per the docs, the parameters must be a JavaScript array of values, so you don't need to stringify the data.
Try this:
const query = 'INSERT INTO goods (id, name) VALUES ($1, $2)'
const data = [1, "milk"];
pool.query(query, data).then(....)
Or
pool.query({
    text: 'INSERT INTO goods (id, name) VALUES ($1, $2)',
    values: [1, 'milk']
}).then(...)
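For context, here is a complete runnable sketch of the fixed flow; the connection string is hypothetical, everything else follows the question:
const { Pool } = require('pg');
const pool = new Pool({
    connectionString: 'postgres://user:password@localhost:5432/mydb' // hypothetical
});
const query = 'INSERT INTO goods (id, name) VALUES ($1, $2)';
const data = [1, 'milk']; // positional values: $1 -> 1, $2 -> 'milk'
pool.query(query, data)
    .then(() => console.log('ok'))
    .catch(error => console.log(error));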

As per the documentation, a prepared statement expects an array of values, not an object with properties, i.e. your data must be: const data = [1, "milk"];

I had the same problem using slonik.
Don't wrap the placeholder in quotes
Don't do this!
connection.query(sql`
    SELECT 1
    FROM foo
    WHERE bar = '${baz}'
`);
Use value placeholders
Do this - leave the variable unquoted, so the sql tag binds it as a parameter:
connection.query(sql`
    SELECT 1
    FROM foo
    WHERE bar = ${baz}
`);
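For context, a fuller sketch; this assumes a slonik version where createPool returns the pool synchronously and the bare sql tag is accepted, and the connection string is hypothetical:
const { createPool, sql } = require('slonik');
const pool = createPool('postgres://user:password@localhost:5432/mydb'); // hypothetical
const baz = 42;
// The sql tag turns ${baz} into a bound parameter ($1) rather than inline text,
// so the bind message supplies exactly as many parameters as the statement requires:
pool.query(sql`
    SELECT 1
    FROM foo
    WHERE bar = ${baz}
`);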

Related

Nodejs Inserting multiple rows into postgres errors on data

I have a script that reads data from a JSON file and inserts it into postgres. Well, that's what it's supposed to do.
The JSON is read properly and I can see the data as an array in the variables. The connection to postgres is made successfully.
Then I get this error:
syntax error at or near "'{"Archived":"false","ClientEmail":"imaclientcompany@gmail.com","ClientId":52,"ClientName":"Ima Client","DateCreated":1637074825658,"DateSubmitted":1637076927912,"ExternalClientId":"null","Id":"8b9391c0-00af-4710-9481-e0e33ddea546","Practitioner":"bob@jonesperformancecenter.com","PractitionerId":"57b49e3d12cd2f144cebb405","PractitionerName":"Bob Jones","QuestionnaireId":"612524e13ccc040f58dd134e","QuestionnaireName":"PTS Pre Session Form","Status":"Completed"}'" at character 224.
The details of the error:
length: 566,
severity: 'ERROR',
code: '42601',
detail: undefined,
hint: undefined,
position: '224',
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'scan.l',
line: '1145',
routine: 'scanner_yyerror'
Character 224 is in the middle of the Id field.
The 42601 error is a syntax error with no hint or detail. When I google 'scanner_yyerror' all the hits refer to unescaped single quotes. There are no single quotes in the data.
Here is the script:
const pg = require('pg');
const { Pool } = require('pg');
const format = require('pg-format');
const fs = require('fs');
let rawintakes = fs.readFileSync('./data/intakes.json', 'utf8');
let intakes = JSON.parse(rawintakes);
let query1 = format('INSERT INTO intake_summary (Archived, ClientEmail, ClientId, ClientName, DateCreated, DateSubmitted, ExternalClientId, Id, Practitioner, PractitionerId, PractitionerName, QuestionnaireId, QuestionnaireName, Status) VALUES %L returning id', intakes);
async function run() {
    let client;
    try {
        client = new pg.Pool({
            connectionString: 'postgres://postres:reallystrongpassword@localhost:5432/cldba01'
        });
        await client.connect();
        let {rows} = await client.query(query1, intakes);
        console.log(rows);
    } catch (e) {
        console.error(e);
    } finally {
        client.end();
    }
}
run();
I can input the same data using SQL without a problem. I have deleted the first record and the same problem occurs on the second one at the same position 224.
I've looked at the query and I don't see a syntax error there either.
Any ideas?
My advice is to use Sequelize instead of raw SQL; it is far easier and clearer.
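For reference, the immediate error can also be fixed without switching libraries: pg-format's %L directive expands a nested array into row tuples, so each object must first be mapped to an array of values in column order. A minimal sketch (inside the async run function, using the intakes array from the question):
// Column order must match the INSERT's column list from the question.
const columns = ['Archived', 'ClientEmail', 'ClientId', 'ClientName',
    'DateCreated', 'DateSubmitted', 'ExternalClientId', 'Id',
    'Practitioner', 'PractitionerId', 'PractitionerName',
    'QuestionnaireId', 'QuestionnaireName', 'Status'];
// %L treats a nested array as rows: [[a, b], [c, d]] => (a, b), (c, d)
const rows = intakes.map(i => columns.map(c => i[c]));
const query1 = format('INSERT INTO intake_summary (Archived, ClientEmail, ClientId, ClientName, DateCreated, DateSubmitted, ExternalClientId, Id, Practitioner, PractitionerId, PractitionerName, QuestionnaireId, QuestionnaireName, Status) VALUES %L returning id', rows);
// pg-format inlines the values, so no parameter array is passed to query():
const { rows: inserted } = await client.query(query1);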

Convert a ColumnSet column into geometry with pg-promise

I'm creating a ColumnSet object with pg-promise, according to this:
const cs = new pgp.helpers.ColumnSet([
    {name: 'Id', prop: 'Id'},
    {name: 'Lat', prop: 'Lat'},
    {name: 'Lng', prop: 'Lng'},
    {name: 'CreationDateTime', prop: 'CreationDateTime'},
    {name: 'Topic', prop: 'Topic'},
    {name: 'UserId', prop: 'UserId'},
    {name: 'shape', mod: ':raw', prop: 'shape', def: 'point'},
    {name: 'UserName', prop: 'UserName'},
    {name: 'appName', prop: 'appName'},
    {name: 'appVersion', prop: 'appVersion'}
], {
    table: 'Location'
});
def: 'point' - here point is a method for converting into geometry. Is this treated as a plain value, or how can I run the point method and bind its result to this column (shape)?
And I wrote this method for bulk inserting:
async function insertMany(values) {
    try {
        let results = await db.none(pgp.helpers.insert(values, cs));
    } catch (error) {
        console.log(error);
    }
}
For converting lat and lng I wrote this method:
const point = (lat, lng) => ({
    toPostgres: () => pgp.as.format('ST_SetSRID(ST_MakePoint($1, $2), 4326)', [Lag, Lng]),
    rawType: true
});
But I got this error:
TypeError: Values null/undefined cannot be used as raw text
According to this page:
Raw-text variables end with :raw or symbol ^, and prevent escaping the text. Such variables are not allowed to be null or undefined, or the method will throw TypeError = Values null/undefined cannot be used as raw text.
When the point method is not executed, the shape field is of course null.
First, you are misusing option prop, which, as documented, is to be used when the destination property name differs from the column name - not your case.
And def, as documented also, represents the value when the property is missing. When the property is there set to null or undefined, the value of def isn't used.
You are trying to override the resulting value, that means you need to use property init.
Another issue: the variables inside your point implementation don't match its parameters (Lag, Lng versus lat, lng).
In all, your code should look something like this:
const getPoint = col => {
    const p = col.value;
    // we assume that when not null, the property is an object of {lat, lng},
    // otherwise we will insert NULL.
    return p ? pgp.as.format('ST_SetSRID(ST_MakePoint(${lat}, ${lng}), 4326)', p) : 'NULL';
};
const cs = new pgp.helpers.ColumnSet([
    'Id',
    'Lat',
    'Lng',
    'CreationDateTime',
    'Topic',
    'UserId',
    {name: 'shape', mod: ':raw', init: getPoint},
    'UserName',
    'appName',
    'appVersion',
], {
    table: 'Location'
});
And version that uses Custom Type Formatting would look like this:
const getPoint = col => {
    const p = col.value;
    if (p) {
        return {
            toPostgres: () => pgp.as.format('ST_SetSRID(ST_MakePoint(${lat}, ${lng}), 4326)', p),
            rawType: true
        };
    }
    // otherwise, we return nothing, which will result in NULL automatically
};
const cs = new pgp.helpers.ColumnSet([
    'Id',
    'Lat',
    'Lng',
    'CreationDateTime',
    'Topic',
    'UserId',
    {name: 'shape', init: getPoint},
    'UserName',
    'appName',
    'appVersion',
], {
    table: 'Location'
});
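For completeness, a minimal usage sketch under the same assumptions (the "Location" table and the insertMany function from the question; the sample values are hypothetical, and shape is the {lat, lng} object that getPoint reads via col.value):
const values = [{
    Id: 1,
    Lat: 35.7,
    Lng: 51.4,
    CreationDateTime: new Date(),
    Topic: 'test',
    UserId: 7,
    shape: {lat: 35.7, lng: 51.4}, // consumed by getPoint
    UserName: 'u1',
    appName: 'demo',
    appVersion: '1.0'
}];
insertMany(values); // generates a single multi-row INSERT into "Location"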

Nodejs - Apache Cassandra (using Datastax Driver)

I'm trying to insert a map type in Cassandra:
const query = 'INSERT INTO stats.tickets (office, line, generation, inserted_at, meta, number, prefix) VALUES (:office, :line, :generation, :inserted_at, :meta, :number, :prefix)';
const parametersExample = {
    office: "office",
    line: "line",
    generation: 10,
    inserted_at: Date.now(),
    meta: {"tag1": "ujkukkik", "tag2": "asdascee"},
    number: 1,
    prefix: "prefix_"
};
const result = async () => {
    return await client.execute(query, parametersExample, { prepare: true });
};
result().then(res => {
    res.rows.map(row => console.log(row.content));
    process.exit();
}).catch(err => console.error(err));
The code inserts the row but shows the following message:
TypeError: Cannot read property 'map' of undefined
at lib/handleData.js:23:14
at <anonymous>
at process._tickDomainCallback (internal/process/next_tick.js:228:7)
What is the reason?
That's normal behavior - per the CQL documentation:
INSERT returns no results unless IF NOT EXISTS is used.
So the driver receives undefined rows... Usually .map is used with SELECT results.
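A small sketch of the guard, under the question's setup (same client, query, and parameters):
const res = await client.execute(query, parametersExample, { prepare: true });
// For an INSERT, res.rows is undefined, so guard before mapping:
(res.rows || []).map(row => console.log(row));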

nodeJS inserting Data into PostgreSQL error

I have a weird error using NodeJS with a PostgreSQL database, and I hope you can help me out.
I have a huge amount of data sets, about 2 million entries, that I want to insert into my DB.
One data set consists of 4 columns:
id: string,
points: float[][]
mid: float[]
occurences: json[]
I am inserting data like so:
let pgp = require('pg-promise')(options);
let connectionString = 'postgres://archiv:archiv@localhost:5432/fotoarchivDB';
let db = pgp(connectionString);
cityNet.forEach((arr) => {
    db
        .none(
            "INSERT INTO currentcitynet(id,points,mid,occurences) VALUES $1",
            Inserts("${id},${points}::double precision[],${mid}::double precision[],${occurences}::json[]", arr))
        .then(data => {
            //success
        })
        .catch(error => {
            console.log(error);
            //error
        });
})
function Inserts(template, data) {
    if (!(this instanceof Inserts)) {
        return new Inserts(template, data);
    }
    this._rawDBType = true;
    this.formatDBType = function() {
        return data.map(d => "(" + pgp.as.format(template, d) + ")").join(",");
    };
}
This works exactly for the first 309248 data pieces, then it suddenly errors out with the following for (what seems like) every next data piece it tries to insert:
{ error: syntax error at end of input
at Connection.parseE (/home/christian/Masterarbeit_reworked/projekt/server/node_modules/pg-promise/node_modules/pg/lib/connection.js:539:11)
at Connection.parseMessage (/home/christian/Masterarbeit_reworked/projekt/server/node_modules/pg-promise/node_modules/pg/lib/connection.js:366:17)
at Socket.<anonymous> (/home/christian/Masterarbeit_reworked/projekt/server/node_modules/pg-promise/node_modules/pg/lib/connection.js:105:22)
at emitOne (events.js:96:13)
at Socket.emit (events.js:188:7)
at readableAddChunk (_stream_readable.js:176:18)
at Socket.Readable.push (_stream_readable.js:134:10)
at TCP.onread (net.js:548:20)
name: 'error',
length: 88,
severity: 'ERROR',
code: '42601',
detail: undefined,
hint: undefined,
position: '326824',
internalPosition: undefined,
internalQuery: undefined,
where: undefined,
schema: undefined,
table: undefined,
column: undefined,
dataType: undefined,
constraint: undefined,
file: 'scan.l',
line: '1074',
routine: 'scanner_yyerror' }
The 'position' entry changes with each iteration of the error message.
I can redo that and it will always error after 309248 entries.
When I try to insert less, like 1000 entries, the error does not occur.
That really confuses me. I thought PostgreSQL does not have a maximum row count. Also, the error message does not help me at all.
SOLVED
The error was found. There were null entries that had slipped into my data. Filtering out the null data fixed it.
I will try out the other recommendations for inserting data, since the current way works but the performance is very poor.
I'm the author of pg-promise. Your whole approach should be changed to the one below.
Proper way to do massive inserts via pg-promise:
const pgp = require('pg-promise')({
    capSQL: true
});
const db = pgp(/*connection details*/);
var cs = new pgp.helpers.ColumnSet([
    'id',
    {name: 'points', cast: 'double precision[]'},
    {name: 'mid', cast: 'double precision[]'},
    {name: 'occurences', cast: 'json[]'}
], {table: 'currentcitynet'});
function getNextInsertBatch(index) {
    // retrieves the next data batch, according to the index, and returns it
    // as an array of objects. A normal batch size: 1000 - 10,000 objects,
    // depending on the size of the objects.
    //
    // returns null when there is no more data left.
}
db.tx('massive-insert', t => {
    return t.sequence(index => {
        const data = getNextInsertBatch(index);
        if (data) {
            const inserts = pgp.helpers.insert(data, cs);
            return t.none(inserts);
        }
    });
})
.then(data => {
    console.log('Total batches:', data.total, ', Duration:', data.duration);
})
.catch(error => {
    console.log(error);
});
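For illustration, a minimal sketch of getNextInsertBatch, assuming the data is already in memory as the cityNet array from the question and a batch size of 1000:
const BATCH_SIZE = 1000;
function getNextInsertBatch(index) {
    const start = index * BATCH_SIZE;
    const batch = cityNet.slice(start, start + BATCH_SIZE);
    return batch.length ? batch : null; // null ends the sequence
}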
UPDATE
And if getNextInsertBatch can only get the data asynchronously, then return a promise from it, and update the sequence->source callback accordingly:
return t.sequence(index => {
    return getNextInsertBatch(index)
        .then(data => {
            if (data) {
                const inserts = pgp.helpers.insert(data, cs);
                return t.none(inserts);
            }
        });
});
Related Links:
tx
sequence / spex.sequence
ColumnSet
Multi-row insert with pg-promise
I'm not sure, but it looks like you have a wrong data structure at the last element (309249), and PostgreSQL cannot parse some property.

Omitting column names / inserting objects directly into node-postgres

I'd like to pass dictionaries with column names as keys, thus avoiding declaring the column names within the query itself (typing them directly).
Assume I have a table User with 2 column names:
idUser(INT)
fullName(VARCHAR)
To create a record using node-postgres, I need to declare the column names within the query, like so:
var idUser = 2;
var fullName = "John Doe";
var query = 'INSERT INTO User(idUser, fullName) VALUES ($1, $2)';
database.query(query, [idUser, fullName], function(error, result) {
    callback(error, result.rows);
    database.end();
});
I'd prefer a way to just pass a dictionary and have it infer the column names from the keys - if there's an easy trick, I'd like to hear it.
E.g. something like this:
var values = {
    idUser: 2,
    fullName: "John Doe"
};
var query = 'INSERT INTO User VALUES ($1)';
database.query(query, [values], function(error, result) {
    callback(error, result.rows);
    database.end();
});
A complete example of doing it with pg-promise:
const pgp = require('pg-promise')(/*options*/);
const cn = 'postgres://username:password@host:port/database';
const db = pgp(cn);
const values = {
    idUser: 2,
    fullName: 'John Doe'
};
// generating the insert query:
const query = pgp.helpers.insert(values, null, 'User');
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')
db.none(query)
    .then(data => {
        // success;
    })
    .catch(error => {
        // error;
    });
And with a focus on high performance, it would change to this:
// generating a set of columns from the object (only once):
const cs = new pgp.helpers.ColumnSet(values, {table: 'User'});
// generating the insert query:
const query = pgp.helpers.insert(values, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe')
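Once cached, the same ColumnSet also generates multi-row inserts from an array of objects; a small sketch under the question's schema (the second user is made up for illustration):
const users = [
    {idUser: 2, fullName: 'John Doe'},
    {idUser: 3, fullName: 'Jane Roe'} // hypothetical second row
];
const multiInsert = pgp.helpers.insert(users, cs);
//=> INSERT INTO "User"("idUser","fullName") VALUES(2,'John Doe'),(3,'Jane Roe')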
There's no support for key-value values in the insert statement, so it cannot be done with native SQL.
However, the node-postgres extras page mentions multiple SQL generation tools, and for example Squel.js parameters can be used to construct SQL in a way very close to what you're looking for:
squel.insert()
    .into("User")
    .setFieldsRows([
        { idUser: 2, fullName: "John Doe" }
    ])
    .toParam()
// => { text: 'INSERT INTO User (idUser, fullName) VALUES (?, ?)',
//      values: [ 2, 'John Doe' ] }
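Note that node-postgres expects numbered placeholders ($1, $2) rather than ?, so when executing the generated query you would enable Squel's numberedParameters option; a sketch, assuming the database client from the question:
const squel = require('squel');
const q = squel.insert({ numberedParameters: true })
    .into('User')
    .setFieldsRows([{ idUser: 2, fullName: 'John Doe' }])
    .toParam();
// => { text: 'INSERT INTO User (idUser, fullName) VALUES ($1, $2)',
//      values: [ 2, 'John Doe' ] }
database.query(q.text, q.values, function(error, result) {
    callback(error, result.rows);
});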
My case was a bit special as I had a field named order in the JSON object which is a keyword in SQL. Therefore I had to wrap everything in quotes using a JSONify() function.
Also note the numberedParameters argument as well as the double quotes around the 'Messages' string.
import { pool } from './connection';
import squel from 'squel'; // the question omits this import

function JSONify(obj: Record<string, any>) {
    var o: Record<string, any> = {};
    for (var i in obj) {
        o['"' + i + '"'] = obj[i]; // make the quotes
    }
    return o;
}
// I have a table named "Messages" with the columns order and name
// I also supply the createdAt and updatedAt timestamps just in case
const messages = [
    {
        order: 0,
        name: 'Message with index 0',
        createdAt: new Date().toISOString(),
        updatedAt: new Date().toISOString(),
    }
]
// Create the insert statement
const insertStatement = squel.insert({ numberedParameters: true })
    .into('"Messages"')
    .setFieldsRows(messages.map((message) => JSONify(message)))
    .toParam();
console.log(insertStatement);
// Notice the quotes wrapping the table and column names
// => { text: 'INSERT INTO "Messages" ("order", "name", "createdAt", "updatedAt") VALUES ($1, $2, $3, $4)',
//      values: [ 0, 'Message with index 0', '2022-07-22T13:51:27.679Z', '2022-07-22T13:51:27.679Z' ] }
// Create
await pool.query(insertStatement.text, insertStatement.values);
See the Squel documentation for more details.
And this is how I create the pool object if anyone is curious.
import { Pool } from 'pg';
import { DB_CONFIG } from './config';
export const pool = new Pool({
    user: DB_CONFIG[process.env.NODE_ENV].username,
    host: DB_CONFIG[process.env.NODE_ENV].host,
    database: DB_CONFIG[process.env.NODE_ENV].database,
    password: DB_CONFIG[process.env.NODE_ENV].password,
    port: DB_CONFIG[process.env.NODE_ENV].port,
});
