PG Promise error when select then insert the data - node.js

i got error on my pg-promise like this:
this is my snippet code:
// create sample payload (10 items with random UUIDs)
let samplePayload = [];
for (let index = 0; index < 10; index++) {
samplePayload.push({
id: uuidv1(),
});
}
return db.task((t) => {
// NOTE(review): the map() result is discarded and nothing is returned from
// the task callback, so every t.any() below becomes a loose promise that can
// still be running after the task releases its connection — the likely
// source of the reported error.
samplePayload.map( sp => {
// check whether this id already exists in the db
// NOTE(review): string-interpolated SQL is injection-prone; pg-promise
// named parameters (e.g. 'where id = ${id}' with `sp`) are safer.
let sql = `select * from sf_data where id = '${sp.id}'`;
console.log(sql)
const sampleBatch = t.any(sql).then(th => {
// insert it here if the row does not exist
console.log(th);
});
})
});
I want to check whether each item in the list of data already exists in the DB. If it does not exist, I need to insert the data.
i try to fix my old code and change into this
// NOTE(review): this block runs inside a db.task(t => ...) callback.
const sfdata = await t.any('SELECT * FROM sf_data');
const queries = sfdata.map((sp) => {
return t.oneOrNone('select * from sf_data where id = ${id}', sp).then(result => {
console.log(result)
if(result){
// NOTE(review): this nested t.tx(...) promise is never returned or
// awaited, so it escapes the batch and can outlive the task's
// connection — the likely cause of
// "Client was closed and is not queryable".
t.tx(async t2 => {
return t2.none("insert into test_sf values ($1, $2, $3)", [uuidv1(), result.id, result.sfid]);
})
}
});
});
return t.batch(queries);
but it return error:
(node:6547) UnhandledPromiseRejectionWarning: Error: Client was closed and is not queryable
at /Users/dsa/Telkom/dtp-dsa-middleware-sf/node_modules/pg/lib/client.js:570:27
at processTicksAndRejections (internal/process/task_queues.js:75:11)
any clue about this?

You are missing return in a few places, and your code creates a bunch of loose promises - queries that are trying to execute outside the connection, hence the error.
Here's correct code:
return db.task(t => {
const queries = samplePayload.map(sp => {
return t.any('select * from sf_data where id = ${id}', sp);
})
return t.batch(queries);
});
Better yet, this can be done with one query:
const ids = samplePayload.map(sp => sp.id);
return db.any('select * from sf_data where id in ($1)', [ids]);

Related

Getting Error [Cannot read properties of undefined (reading 'generatetypeinfo')] in Node JS API post method

I am new to Restful API development using NodeJS and SQL Server. I am trying to do a simple [post] operation where I am passing an array of objects to the API endpoint and then calling a SQL Server procedure with a table valued parameter. I am getting the below error
Cannot read properties of undefined (reading 'generateTypeInfo')
I was really shocked to see that there is not a single help topic on Google regarding this error. I do not want to learn ASP.NET Core for this because JavaScript has an easy learning curve. Am I making a mistake by developing a REST API using the combination of NodeJS and SQL Server? Below is my related .JS file, called in the POST endpoint.
const sql = require("mssql/msnodesqlv8");
const dataAccess = require("../DataAccess");
const fn_CreateProd = async function (product) {
// Inserts products via a SQL Server table-valued parameter (TVP):
// 1) connect, 2) fetch an empty recordset to learn the products schema,
// 3) build the TVP from that schema, 4) execute the insert procedure.
// Resolves to { retStatus: <message> }; errors are folded into the message.
let errmsg = "";
// NOTE(review): connPool is declared but never used — global.connPool is
// assigned instead.
let connPool = null;
// NOTE(review): mixing `await` with a long .then() chain is hard to follow;
// plain awaits inside try/catch would be clearer.
await sql
.connect(global.config)
.then((pool) => {
global.connPool = pool;
// NOTE(review): `result` is assigned without declaration -> implicit global.
result = pool.request().query("select * from products where 1=2");
return result;
})
.then((retResult) => {
// "where 1=2" returns zero rows but a full column/type description.
const srcTable = retResult.recordset.toTable("tvp_products");
let newsrcTable = Array.from(srcTable.columns);
console.log('Source table b4 mapping',srcTable)
newsrcTable = newsrcTable.map((i) => {
i.name = i.name.toUpperCase();
return i;
});
console.log('Source table after convert array with mapping',newsrcTable)
const prdTable = dataAccess.generateTable(
newsrcTable,
product,
"tvp_products"
);
console.log("Prepared TVp data", prdTable);
// NOTE(review): dataAccess.execute is presumably async; its result is not
// awaited here, so `newResult` is likely a pending promise — verify.
const newResult = dataAccess.execute(`sp3s_ins_products_tvp`, [
{ name: "tblprods", value: prdTable },
]);
console.log("Result of Execute Final procedure", newResult);
return newResult;
})
.then(result => {
console.log("Result of proc", result);
if (!result.errmsg) errmsg = "Products Inserted successfully";
else errmsg = result.errmsg;
})
.catch((err) => {
console.log("Enter catch of Posting prod", err.message);
errmsg = err.message;
})
.finally((resp) => {
// Always release the connection pool, success or failure.
sql.close();
});
return { retStatus: errmsg };
};
module.exports = fn_CreateProd;
and the contents of the generateTable function are as below:
/**
 * Builds an mssql TVP (table-valued parameter) from a column description
 * array and a list of row objects.
 *
 * Fixes vs. the original: one branch passed an undefined `colType`
 * variable and the other omitted the data type entirely, so the driver
 * failed with "Cannot read properties of undefined (reading
 * 'generateTypeInfo')". Each column is now added with its `column.type`.
 *
 * @param {Array}  columns  column descriptors ({ name, type, scale, ... })
 * @param {Array}  entities row objects keyed by (uppercased) column name
 * @param {string} tvpName  name of the TVP type in SQL Server
 * @returns {mssql.Table} populated table ready to pass to a procedure
 */
const generateTable = (columns, entities, tvpName) => {
  const table = new mssql.Table(tvpName);
  columns.forEach(column => {
    if (column && typeof column === 'object' && column.name && column.type) {
      // Carry over the type-specific sizing options the driver needs.
      let colOptions = {};
      if (column.type == mssql.Numeric) {
        colOptions.scale = column.scale;
        colOptions.precision = column.precision;
      } else if (column.type == mssql.VarChar || column.type == mssql.Char) {
        colOptions.length = column.length;
      }
      if (column.hasOwnProperty('options')) {
        table.columns.add(column.name.toUpperCase(), column.type, column.options);
      } else {
        // The data type is mandatory — omitting it caused the original error.
        table.columns.add(column.name.toUpperCase(), column.type, colOptions);
      }
    }
  });
  console.log('Generated table', table);
  // Rows are keyed by uppercased names, matching the uppercased columns.
  const newEntities = entities.map(obj => keystoUppercase(obj));
  newEntities.forEach(entity => {
    table.rows.add(...columns.map(i => entity[i.name]));
  });
  return table;
};
I have found the solution now. Actually, if you can see the code of generateTable function, I was adding the columns into the table but not mentioning the data type of the columns due to which this error was coming. I have added one more property [type] in the [colOptions] object being passed to columns.add command in the function [Generatetable]. Thanks a lot anyway to you for quick replies by Dale. K.

Node.js Query sqlite with 'sqlite`

I'm trying to get a hang of Node (I mainly use python) so I'm working on a small project to read an write data to a sqlite database.
I am having no issue writing to the database luckily, but I cannot seem to get queries to work at all. I've tested the queries in the sql terminal and they are successful.
So far, I have something like
const fs = require("fs");
const util = require("util");
const sqlite = require("sqlite");
const Promise = require("bluebird")
// const DATABASE = ":memory:";
const DATABASE = "./database.sqlite";
/**
 * Inserts every transaction into the Trx table in one statement.
 *
 * Fixes vs. the original: values were spliced into the SQL with
 * util.format, which is injection-prone and breaks on quotes in names;
 * they are now bound via `?` placeholders. The promise is also returned
 * so callers can await completion or handle errors themselves.
 *
 * @param {Array<{name,address,amount,category}>} transactions rows to insert
 * @param {Promise} db promise resolving to an open sqlite database
 * @returns {Promise} settles when the insert finishes (errors are logged)
 */
function insertDataIntoDatabase(transactions, db) {
  const placeholders = transactions.map(() => "(?, ?, ?, ?)").join(", ");
  const sqlStatement =
    `INSERT INTO Trx (name, address, amount, category) VALUES ${placeholders}`;
  // Flatten rows into one positional-parameter list matching the placeholders.
  const params = transactions.flatMap((trx) => [
    trx.name,
    trx.address,
    trx.amount,
    trx.category,
  ]);
  return db
    .then((db) => db.run(sqlStatement, params))
    .catch((err) => console.log(err));
}
/**
 * Returns a promise resolving to the transactions in Trx, optionally
 * filtered by category.
 *
 * Fixes vs. the original: the query promise is now returned (the original
 * returned undefined, so callers could never see rows); db.all resolves
 * with the row array only, so it is no longer destructured as (err, rows);
 * and the category filter is bound as a `?` parameter instead of being
 * formatted into the SQL string.
 *
 * @param {Promise} db promise resolving to an open sqlite database
 * @param {string} [category] optional category filter
 * @returns {Promise<Array>} the matching rows
 */
function getTransactions (db, category) {
  let sqlStatement = "SELECT * from Trx";
  const params = [];
  if (category) {
    sqlStatement += " WHERE category = ?";
    params.push(category);
  }
  console.log(sqlStatement);
  return db
    .then((db) => db.all(sqlStatement, params))
    .then((rows) => {
      console.log(rows);
      return rows;
    });
}
// Set up the db connection
const db = sqlite.open(DATABASE, { cached: true })
.then(db => db.migrate({ force: 'last' }));
// Read transactions and write them to the database
// NOTE(review): readFile is asynchronous, so this insert races with the
// query and close() below — the query can run (and the db can be closed)
// before the data has been inserted, which matches the empty result the
// author observed.
fs.readFile("transactions.json", "utf8", (err, data) => {
let transactions = JSON.parse(data).transactions;
insertDataIntoDatabase(transactions, db);
})
// Get transaction data
// NOTE(review): no getValidTransactions is defined above — presumably this
// should call getTransactions; verify.
getValidTransactions(db, 'credit');
// Close connection to DB
db.then(db => db.close());
Looking at this again, I think the issue is the async nature of Node. The query was successful, but at that point in time, I had not inserted the data from the json file into the database yet, hence the empty query.

How do I return a value inside a Promise? In "then()" and "catch()"

I just want to return 1 and return 0 at the specified places. I've looked at numerous sources but was unable to solve this issue.
Below is the code :
exports.getLatLng = function(row){
// Geocodes the attraction name, stores the raw result in Mongo, flattens
// lat/lon onto `row`, indexes it into Solr, and resolves to 1 on success
// or 0 on any failure (the catch logs and swallows the error).
var attractionId = row['attractionid'];
var attractionName = row['attractionname'] + ' ' +
row['Destination_name'];
var googleMapsResults;
return googleMapsClient.geocode({address: attractionName}).asPromise()
.then((response) => {
// NOTE(review): assumes at least one geocoding result; with an empty
// results array the .geometry access below throws (caught -> resolves 0).
googleMapsResults = response.json.results[0];
// console.log(googleMapsResults);
model.dumpIntoMongo(attractionId, googleMapsResults);
// var y=tmp[0];
var latitude = googleMapsResults.geometry.location.lat;
var longitude = googleMapsResults.geometry.location.lng;
row["id"] = parseInt(attractionId);
// delete row['attractionid'];
// Strip fields that should not be indexed.
delete row['destination_id'];
delete row['destination_name'];
delete row['attraction_id'];
delete row['success'];
// row["lat"] = latitude;
// row["lon"] = longitude;
row["latlon"] = latitude.toString() + "," + longitude.toString();
exports.indexIntoSolr(row);
return 1; //return 1
})
.catch((err) => {
console.log(err);
return 0; // return 0
});
}
In case If you want another more clear implementation, you can refer this one.
Here, First write a function which returns some data as promise after certain calculations:
/**
 * Illustrative promise factory: resolves with some_data when everything
 * goes as expected, rejects with error otherwise (placeholder conditions).
 */
function getSomething(credentials) {
  const { name } = credentials;
  return new Promise((resolve, reject) => {
    if (everything_as_expected) {
      resolve(some_data);
      return;
    }
    if (some_error) {
      reject(error);
    }
  });
}
To handle results/Data returned by the function( promisely ), call that previous function:
getSomething(credentials)
.then((message) => {
console.log(`${message} got after success.`);
})
.catch((error_message) => {
console.log(error_message);
});
The method returns a promise that is pending until it resolves. To get a value you need to attach a continuation to it:
getLatLng(row).then( result => {
console.log( result );
});
This will definitely display 0 or 1 as your internal implementation of the getLatLng looks like it correctly handles both execution paths.

How can i refactor my async code in javascript (I'm using Promise)

// Fetch the user's programs, then attach each program's open_programs list
// before responding with the combined payload.
dbConnect(res).then((conn)=>{
query(conn,res,
`SELECT * FROM programs WHERE user_id = ?`,
[
user_id
]
).then((programList)=>{
// NOTE(review): wrapping existing promises in `new Promise` is the
// promise-constructor anti-pattern; Promise.all(programList.map(...))
// expresses the same fan-out without hand-counting completions.
new Promise((resolved,rejected)=>{
for(let i = 0; i<programList.length;i++){
query(conn,res,`
SELECT * FROM open_programs WHERE program_id = ?`,[programList[i].program_id])
.then((opList)=>{
Object.assign(programList[i],{openList : opList});
console.log(programList[i]);
// Resolve when the last index finishes.
// NOTE(review): if programList is empty this promise never settles, so
// conn.release()/res.json below never run; queries can also finish out
// of order, so the last index may resolve before earlier ones complete.
if(i == (programList.length-1)){
resolved(programList)
}
})
}
}).then((result)=>{
conn.release();
res.json(toRes(SUCCESS,
{ data : result }
));
})
});
});
'dbConnect' and 'query' are my custom methods that are built on Promises.
In this code, I first get the initial results (schema = programs),
then I fetch the next results using firstResults.program_id in a 'for' loop,
and Object.assign the first results with the next results.
After assigning, I respond with the combined data.
How can I improve this code?
Promise.all and Array.map will help you here. Untested:
// Fan out one open_programs query per program and wait for all of them
// with Promise.all.
// Fixes vs. the original snippet: `conn` was referenced in a later .then()
// where it was out of scope, and the parentheses were unbalanced; keeping
// the whole chain inside the first callback keeps `conn` visible for
// release.
dbConnect(res).then(conn => {
  return query(conn, res, `SELECT * FROM programs WHERE user_id = ?`, [
    user_id,
  ]).then(programList => {
    // One query per program, run in parallel.
    return Promise.all(programList.map(program =>
      query(
        conn,
        res,
        `SELECT * FROM open_programs WHERE program_id = ?`,
        [program.program_id],
      ).then(opList => {
        console.log(program);
        return Object.assign(program, { openList: opList });
      })
    ));
  }).then(result => {
    conn.release();
    res.json(toRes(SUCCESS, { data: result }));
  });
});
You should verify that result is the data structure you're looking for after this code.
Cleaned up a little more with the babel-plugin-transform-object-rest-spread plugin:
// Same pipeline using object spread instead of Object.assign.
// Fix vs. the original snippet: the query/Promise.all chain is nested
// inside the first .then so `conn` stays in scope for conn.release()
// (it was referenced out of scope before).
dbConnect(res).then(conn =>
  query(conn, res, `SELECT * FROM programs WHERE user_id = ?`, [
    user_id,
  ]).then(programList =>
    Promise.all(programList.map(program =>
      query(
        conn,
        res,
        `SELECT * FROM open_programs WHERE program_id = ?`,
        [program.program_id],
      ).then(opList => ({
        ...program,
        openList: opList,
      }))
    ))
  ).then(result => {
    conn.release();
    res.json(toRes(SUCCESS, { data: result }));
  })
);
@Andy Ray provided a nice solution. I just want to add some additional improvements.
In order to make things more readable, you can extract your existing code into smaller functions and organize it like this:
connectDb(res)
.then(selectPrograms)
.then(populateOpenPrograms)
.then(handleResult);
And if you want to improve it more, consider using async / await

Is this the proper way to write a multi-statement transaction with Neo4j?

I am having a hard time interpretting the documentation from Neo4j about transactions. Their documentation seems to indicate preference to doing it this way rather than explicitly declaring tx.commit() and tx.rollback().
Does this look best practice with respect to multi-statement transactions and neo4j-driver?
const register = async (container, user) => {
// Registers a user in Neo4j as one managed write transaction:
// create the Person node, derive a serial from the new node's id+created
// timestamp, then write the serial back onto the same node.
const session = driver.session()
const timestamp = Date.now()
const saltRounds = 10
const pwd = await utils.bcrypt.hash(user.password, saltRounds)
try {
//Start registration transaction
// NOTE(review): user-supplied values are spliced into the Cypher string;
// prefer driver parameters (e.g. $email) to avoid injection/quoting bugs.
const registerUser = session.writeTransaction(async (transaction) => {
const initialCommit = await transaction
.run(`
CREATE (p:Person {
email: '${user.email}',
tel: '${user.tel}',
pwd: '${pwd}',
created: '${timestamp}'
})
RETURN p AS Person
`)
const initialResult = initialCommit.records
.map((x) => {
return {
id: x.get('Person').identity.low,
created: x.get('Person').properties.created
}
})
.shift()
//Generate serial
const data = `${initialResult.id}${initialResult.created}`
const serial = crypto.sha256(data)
const finalCommit = await transaction
.run(`
MATCH (p:Person)
WHERE p.email = '${user.email}'
SET p.serialNumber = '${serial}'
RETURN p AS Person
`)
const finalResult = finalCommit.records
.map((x) => {
return {
serialNumber: x.get('Person').properties.serialNumber,
email: x.get('Person').properties.email,
tel: x.get('Person').properties.tel
}
})
.shift()
//Merge both results for complete person data
return Object.assign({}, initialResult, finalResult)
})
//Commit or rollback transaction
// writeTransaction commits when the callback's promise resolves and
// rolls back when it rejects.
return registerUser
.then((commit) => {
session.close()
return commit
})
.catch((rollback) => {
// NOTE(review): the session is not closed on this path — consider
// closing it in a finally.
console.log(`Transaction problem: ${JSON.stringify(rollback, null, 2)}`)
throw [`reg1`]
})
} catch (error) {
session.close()
throw error
}
}
Here is the reduced version of the logic:
const register = (user) => {
// Reduced shape of the pattern above: writeTransaction receives an async
// callback that awaits each statement; the returned promise settles when
// the transaction commits (callback resolved) or rolls back (rejected).
const session = driver.session()
const performTransaction = session.writeTransaction(async (tx) => {
const statementOne = await tx.run(queryOne)
const resultOne = statementOne.records.map((x) => x.get('node')).slice()
// Do some work that uses data from statementOne
const statementTwo = await tx.run(queryTwo)
const resultTwo = statementTwo.records.map((x) => x.get('node')).slice()
// Do final processing
return finalResult
})
return performTransaction.then((commit) => {
session.close()
return commit
}).catch((rollback) => {
// NOTE(review): the session is not closed on the failure path.
throw rollback
})
}
Neo4j experts, is the above code the correct use of neo4j-driver ?
I would rather do this, because it's more linear and reads synchronously:
// Explicit-transaction variant, written linearly.
// Fixes vs. the original: `await` was used inside a non-async arrow
// (a SyntaxError), and tx.commit()/tx.rollback() return promises that
// must be awaited before the session is closed — otherwise the session
// can be torn down before the commit lands.
const register = async (user) => {
  const session = driver.session()
  const tx = session.beginTransaction()
  const statementOne = await tx.run(queryOne)
  const resultOne = statementOne.records.map((x) => x.get('node')).slice()
  // Do some work that uses data from statementOne
  const statementTwo = await tx.run(queryTwo)
  const resultTwo = statementTwo.records.map((x) => x.get('node')).slice()
  // Do final processing
  const finalResult = { obj1, ...obj2 }
  let success = true
  if (success) {
    await tx.commit()
    session.close()
    return finalResult
  } else {
    await tx.rollback()
    session.close()
    return false
  }
}
I'm sorry for the long post, but I cannot find any references anywhere, so the community needs this data.
After much more work, this is the syntax we have settled on for multi-statement transactions:
Start session
Start transaction
Use try/catch block after (to enable proper scope in catch block)
Perform queries in the try block
Rollback in the catch block
.
// Multi-statement Neo4j transaction: run two parameterized queries inside
// one explicit transaction, commit on success, roll back on any error.
// Fixes vs. the original: both catch handlers concatenated an undefined
// `e` (the parameter is `err`), and `await tx.commit` never invoked the
// function — it must be `await tx.commit()` so the commit completes
// before the session is closed.
const someQuery = async () => {
  const session = Neo4J.session()
  const tx = session.beginTransaction()
  try {
    const props = {
      one: 'Bob',
      two: 'Alice'
    }
    const tx1 = await tx
      .run(`
        MATCH (n:Node)-[r:REL]-(o:Other)
        WHERE n.one = $props.one
        AND n.two = $props.two
        RETURN n AS One, o AS Two
      `, { props })
      .then((result) => {
        return {
          data: '...'
        }
      })
      .catch((err) => {
        throw 'Problem in first query. ' + err
      })
    // Do some work using tx1
    const updatedProps = {
      _id: 3,
      four: 'excellent'
    }
    const tx2 = await tx
      .run(`
        MATCH (n:Node)
        WHERE id(n) = toInteger($updatedProps._id)
        SET n.four = $updatedProps.four
        RETURN n AS One, o AS Two
      `, { updatedProps })
      .then((result) => {
        return {
          data: '...'
        }
      })
      .catch((err) => {
        throw 'Problem in second query. ' + err
      })
    // Do some work using tx2
    if (problem) throw 'Rollback ASAP.'
    await tx.commit()
    session.close()
    return Object.assign({}, tx1, { tx2 })
  } catch (e) {
    // Roll back whatever ran before the failure, then surface the error.
    await tx.rollback()
    session.close()
    throw 'someQuery# ' + e
  }
}
I will just note that if you are passing numbers into Neo4j, you should wrap them inside the Cypher Query with toInteger() so that they are parsed correctly.
I included examples of query parameters also and how to use them. I found it cleans up the code a little.
Besides that, you basically can chain as many queries inside the transaction as you want, but keep in mind 2 things:
Neo4j write-locks all involved nodes during a transaction, so if you have several processes all performing operations on the same node, you will see that only one process can complete a transaction at a time. We made our own business logic to handle write issues and opted to not even use transactions. It is working very well so far, writing 100,000 nodes and creating 100,000 relationships in about 30 seconds spread over 10 processes. It took 10 times longer to do in a transaction. We experience no deadlocking or race conditions using UNWIND.
You have to await the tx.commit() or it won't commit before it nukes the session.
My opinion is that this type of transaction works great if you are using Polyglot (multiple databases) and need to create a node, and then write a document to MongoDB and then set the Mongo ID on the node.
It's very easy to reason about, and extend as needed.

Resources