How to prevent Knex from hanging on insert - node.js

I am using Knex.js to insert values from an array into a PostgreSQL database. The problem I keep running into is that Knex will hang after inserting rows in the database.
I've been struggling with this for several hours and have tried a variety of solutions, including "Get Knex.js transactions working with ES7 async/await", "Make KnexJS Transactions work with async/await", and "Knex Transaction with Promises".
No matter which flavor I try, I come back to the hang. I'm pretty sure I'm missing something obvious, but it's possible I haven't had enough coffee.
Here's my test code:
const testArray = [
  {line: 'Canterbury Tales'},
  {line: 'Moby Dick'},
  {line: 'Hamlet'}
];

const insertData = (dataArray) => {
  return new Promise((resolve, reject) => {
    const data = dataArray.map(x => {
      return {
        file_line: x.line
      };
    });
    let insertedRows;
    db.insert(data)
      .into('file_import')
      .then((result) => {
        insertedRows = result.rowCount;
        resolve(insertedRows);
      });
  });
};

const testCall = (b) => {
  insertData(b).then((result) => {
    console.log(`${result} rows inserted.`);
  });
};

testCall(testArray);
This prints the following, but then the script hangs instead of exiting:
3 rows inserted.
EDIT: Updating with solution
Thanks to @sigmus, I was able to get this working by adding db.destroy(). Here's the updated code block, fully functional:
const testArray = [
  {line: 'Canterbury Tales'},
  {line: 'Moby Dick'},
  {line: 'Hamlet'}
];

const insertData = (dataArray) => {
  return new Promise((resolve, reject) => {
    const data = dataArray.map(x => {
      return {
        file_line: x.line
      };
    });
    let insertedRows;
    db.insert(data)
      .into('file_import')
      .then((result) => {
        insertedRows = result.rowCount;
        resolve(insertedRows);
      })
      .finally(() => {
        db.destroy();
      });
  });
};

const testCall = (b) => {
  insertData(b).then((result) => {
    console.log(`${result} rows inserted.`);
    process.exit(0);
  });
};

testCall(testArray);

If you add process.exit(0); right after the console.log(`${result} rows inserted.`); call, the script should exit.
It may also be a connection pool issue: the pool keeps the Node.js event loop alive until it is released, so try calling destroy as explained here: https://knexjs.org/#Installation-pooling
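For completeness, a minimal async/await sketch of the same insert, assuming the same db Knex instance and file_import table; with PostgreSQL, returning('id') resolves with the inserted ids, and destroy() in a finally block releases the pool whether the insert succeeds or fails:

const insertData = async (dataArray) => {
  const data = dataArray.map((x) => ({ file_line: x.line }));
  try {
    // returning('id') resolves with the inserted ids on PostgreSQL,
    // so result.length is the number of rows inserted
    const result = await db.insert(data).into('file_import').returning('id');
    return result.length;
  } finally {
    await db.destroy(); // release the pool so the Node.js process can exit
  }
};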

Related

Only first document is returned as response instead of multiple documents using Firestore and Cloud Functions

I have this Express function:
exports.getSliderTipsteriData = (req, res) => {
  let sliderTipsteriData = [];
  db.collection("tipsterBanner")
    .orderBy("createdAt", "desc")
    .where("show", "==", true)
    .get()
    .then((data) => {
      data.forEach((doc) => {
        let eventId = doc.data().eventId;
        sliderTipsteriData = doc.data();
        db.collection("evenimenteTipsteri")
          .orderBy("createdAt", "desc")
          .get()
          .then((data) => {
            sliderTipsteriData.tipsteri = [];
            data.forEach((doc) => {
              if (doc.data().bilet[0].id === sliderTipsteriData.eventId) {
                sliderTipsteriData.tipsteri.push({
                  tipster: doc.data().tipster,
                  homeTeam: doc.data().bilet[0].homeTeam,
                  awayTeam: doc.data().bilet[0].awayTeam
                });
              }
            });
            return res.json(sliderTipsteriData);
          });
      });
    })
    .catch((err) => {
      console.error(err);
      res.status(500).json({ error: err.code });
    });
};
and received this as response:
{
  "imageUrl": "https://firebasestorage.googleapis.com/v0/b/socialape-bea5b.appspot.com/o/slider1.jpg?alt=media&token=0824a93d-4bc3-49fa-9ae8-4408961a0736",
  "event_date": 1614110400,
  "awayTeamName": "Bayer Leverkusen",
  "awayTeamPercent": 23,
  "homeTeamName": "Atletico Madrid",
  "homeTeamShortName": "ATL",
  "awayTeamEmblem": "https://media.api-sports.io/football/teams/34.png",
  "createdAt": "2021-03-22T18:25:03.667Z",
  "homeTeamEmblem": "https://media.api-sports.io/football/teams/49.png",
  "awayTeamShortName": "LEV",
  "homeTeamPercent": "77",
  "show": true,
  "eventId": 652238,
  "homeTeamColor": "#0099ff",
  "awayTeamColor": "#ff0000",
  "etapa": "Liga Campionilor, Etapa 2",
  "tipsteri": [
    {
      "tipster": "daniel",
      "homeTeam": "Lazio",
      "awayTeam": "Bayern Munich"
    },
    {
      "tipster": "user",
      "homeTeam": "Lazio",
      "awayTeam": "Bayern Munich"
    }
  ]
}
The problem is that I have more than one document in the tipsterBanner collection, but I receive only the first one, so the forEach over the docs might not be working properly.
Any idea what I'm missing here?
I expect the response to be the sliderTipsteriData array with multiple objects, not just the first one. It looks like the forEach doesn't actually loop.
This is because you create promises inside loops without waiting for them. Inside your then blocks you perform more asynchronous calls (collection().get()), but your code isn't waiting for them to resolve, so it flies through your forEach loop, creates those promises, then reaches the end and returns.
There are two ways to solve that problem: (1) collect the promises into a Promise.all and wait for that to resolve, or (2) switch to async/await. But I think you have an even better solution. Right now you query your evenimenteTipsteri collection on every loop iteration, but you don't use any information from the tipsterBanner collection as parameters in the evenimenteTipsteri query, so you can query both collections once and handle all the filtering and organizing in code. This will speed up your results and protect you from the cost of unnecessary reads on Firestore.
This code is untested because I just copy/pasted your code and rewrote it without being able to run it, but this is the main idea:
exports.getSliderTipsteriData = (req, res) => {
  let sliderTipsteriData = [];
  // These create the promises which will resolve in our Promise.all()
  const tipsterBanner = db.collection("tipsterBanner").orderBy("createdAt", "desc").where("show", "==", true).get();
  const evenimenteTipsteri = db.collection("evenimenteTipsteri").orderBy("createdAt", "desc").get();
  Promise.all([tipsterBanner, evenimenteTipsteri]).then((results) => {
    // results is now an array of your original "data" items from your then blocks
    // results[0] is the data from the tipsterBanner query
    // results[1] is the data from the evenimenteTipsteri query
    const tipsterBannerResults = results[0];      // Just to make it more readable
    const evenimenteTipsteriResults = results[1]; // Just to make it more readable
    tipsterBannerResults.forEach(doc => {
      let eventId = doc.data().eventId;
      sliderTipsteriData = doc.data(); // Is this right? You could end up overwriting your data
      sliderTipsteriData.tipsteri = [];
      evenimenteTipsteriResults.forEach(doc => {
        if (doc.data().bilet[0].id === sliderTipsteriData.eventId) {
          sliderTipsteriData.tipsteri.push({
            tipster: doc.data().tipster,
            homeTeam: doc.data().bilet[0].homeTeam,
            awayTeam: doc.data().bilet[0].awayTeam
          });
        }
      });
    });
    return res.json(sliderTipsteriData);
  }).catch(error => {
    // Handle errors from your queries
    console.error(error);
    res.status(500).json({ error: error.code });
  });
};
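If you prefer option (2), an async/await version of the same idea might look like the sketch below (untested, same assumptions as above); it also pushes one entry per banner document into an array instead of overwriting sliderTipsteriData, which addresses the concern flagged in the comment:

exports.getSliderTipsteriData = async (req, res) => {
  try {
    // Run both queries in parallel and wait for both snapshots
    const [tipsterBannerResults, evenimenteTipsteriResults] = await Promise.all([
      db.collection("tipsterBanner").orderBy("createdAt", "desc").where("show", "==", true).get(),
      db.collection("evenimenteTipsteri").orderBy("createdAt", "desc").get()
    ]);
    const sliderTipsteriData = [];
    tipsterBannerResults.forEach((bannerDoc) => {
      const banner = bannerDoc.data();
      banner.tipsteri = [];
      evenimenteTipsteriResults.forEach((eventDoc) => {
        const event = eventDoc.data();
        if (event.bilet[0].id === banner.eventId) {
          banner.tipsteri.push({
            tipster: event.tipster,
            homeTeam: event.bilet[0].homeTeam,
            awayTeam: event.bilet[0].awayTeam
          });
        }
      });
      sliderTipsteriData.push(banner); // one entry per banner document
    });
    return res.json(sliderTipsteriData);
  } catch (err) {
    console.error(err);
    return res.status(500).json({ error: err.code });
  }
};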

How to use Redis cache for pagination?

I want to do pagination with a Redis cache. I'm using Node.js as my back-end and the npm redis-scanner package for scanning through the keys.
I'm unable to jump to a particular key and fetch the next 15 keys from the Redis cache. How do I do it?
You can use ioredis like so:
const Redis = require("ioredis");

async function getByCursor({ cursor }) {
  const db = new Redis();
  const stream = db.scanStream({ count: 5 }); // count = number of items to return per page
  try {
    const scan = () =>
      new Promise((resolve, reject) => {
        let items = [];
        stream.on("data", (page) => {
          items = [...items, page]; // page = ['key1', 'key2', etc.]
        });
        stream.on("end", () => {
          resolve(items); // items = [['key1', 'key2'], ['key3', 'key4']]
        });
      });
    const keys = await scan();
    return Promise.all(
      keys[cursor].map(async (key) => JSON.parse(await db.get(key)))
    );
  } catch (e) {
    console.error(e);
    return Promise.resolve([]);
  }
}
or slightly faster:
async function getByCursor({ cursor }) {
  const stream = db.scanStream({ count: 5 });
  try {
    return new Promise((resolve, reject) => {
      let pageCount = 0;
      stream.on("data", (page) => {
        pageCount = pageCount + 1;
        if (pageCount == Number(cursor)) {
          resolve(
            Promise.all(
              // unpack = your deserializer, e.g. JSON.parse as above
              page.map(async (id) => unpack(await db.get(id)))
            ).catch((e) => {
              console.error(e);
              reject([]);
            })
          );
        }
      });
    });
  } catch (e) {
    console.error(e);
  }
}
Using cursors
Here a cursor value of 1, provided e.g. as a param or query string when calling your 'items-by-cursor' Node route, gives you the first 5 items; a value of 2 gives the next page, and so on. Note that the count is just an estimate, i.e. you can't rely on it always returning exactly 5 keys (see the documentation link below for details). You can check https://react-query.tanstack.com/examples/load-more-infinite-scroll for a frontend solution to obtain the relevant cursors to be sent to the route.
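As a sketch of the wiring, assuming an Express app and the getByCursor function above (the route path and parameter name are illustrative):

const express = require("express");
const app = express();

// GET /items-by-cursor/1 -> first page, /items-by-cursor/2 -> next page, etc.
app.get("/items-by-cursor/:cursor", async (req, res) => {
  const items = await getByCursor({ cursor: req.params.cursor });
  res.json(items);
});

app.listen(3000);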
Note on performance
Fetching the key-value pairs via a stream is non-blocking, in contrast to fetching every key at once with db.keys('*') and then filtering the result in your route. This implementation is therefore recommended for production.
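For contrast, the blocking approach that this note warns against would look roughly like the following (inside an async function); KEYS walks the entire keyspace in a single command and stalls Redis while it runs:

// Anti-pattern: fine for tiny datasets, avoid in production
const allKeys = await db.keys("*");
const allItems = await Promise.all(
  allKeys.map(async (key) => JSON.parse(await db.get(key)))
);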
Further ioredis documentation
https://github.com/luin/ioredis#streamify-scanning

How to fix MongoError: Cannot use a session that has ended

I'm trying to read data from a MongoDB Atlas collection using Node.js. When I try to read the contents of my collection, I get the error MongoError: Cannot use a session that has ended. Here is my code:
client.connect(err => {
  const collection = client
    .db("sample_airbnb")
    .collection("listingsAndReviews");
  const test = collection.find({}).toArray((err, result) => {
    if (err) throw err;
  });
  client.close();
});
I'm able to query for a specific document, but I'm not sure how to return all documents of a collection. I've searched for this error but can't find much on it. Thanks
In your code, it doesn't wait for find() to finish executing before moving on to the client.close() statement, so by the time it tries to read data from the db, the connection has already been closed. I faced the same problem and solved it like this:
// connect to your cluster
const client = await MongoClient.connect('yourMongoURL', {
  useNewUrlParser: true,
  useUnifiedTopology: true,
});
// specify the DB's name
const db = client.db('nameOfYourDB');
// execute find query
const items = await db.collection('items').find({}).toArray();
console.log(items);
// close connection
client.close();
EDIT: this whole thing should be in an async function.
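For example, a minimal wrapper (the connection string and names are placeholders):

const { MongoClient } = require('mongodb');

async function main() {
  const client = await MongoClient.connect('yourMongoURL', {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  });
  try {
    const db = client.db('nameOfYourDB');
    const items = await db.collection('items').find({}).toArray();
    console.log(items);
  } finally {
    await client.close(); // runs even if the query throws
  }
}

main().catch(console.error);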
Ran into the same issue when I updated the MongoClient from 3.3.2 to the latest version (3.5.2 as of this writing). Either pin version 3.3.2 by changing package.json to "mongodb": "3.3.2", or just use an async/await wrapper.
If the issue still persists, remove node_modules and install again.
One option is to use a Promise chain. collection.find({}).toArray() can either receive a callback function or return a promise, so you can chain calls with .then():
collection.find({}).toArray() // returns the 1st promise
  .then(items => {
    console.log('All items', items);
    return collection.find({ name: /^S/ }).toArray(); // return another promise
  })
  .then(items => {
    console.log("All items with field 'name' beginning with 'S'", items);
    client.close(); // Last promise in the chain closes the database
  });
Of course, this daisy-chaining makes the code run sequentially. That is useful when the next call in the chain relates to the previous one, e.g. getting a user id in the first call, then looking up user details in the next.
Several unrelated queries should be executed in parallel (async), and when all the results are back, the database connection can be disposed of.
You could do this by tracking each call in an array or with a counter, for example:
const totalQueries = 3;
let completedQueries = 0;

collection.find({}).toArray()
  .then(items => {
    console.log('All items', items);
    dispose(); // Increments the counter and closes the connection if total reached
  });

collection.find({ name: /^S/ }).toArray()
  .then(items => {
    console.log("All items with field 'name' beginning with 'S'", items);
    dispose(); // Increments the counter and closes the connection if total reached
  });

collection.find({ age: 55 }).toArray()
  .then(items => {
    console.log("All items with field 'age' with value '55'", items);
    dispose(); // Increments the counter and closes the connection if total reached
  });

function dispose() {
  if (++completedQueries >= totalQueries) {
    client.close();
  }
}
You have 3 queries. As each one invokes dispose() the counter increments. When they've all invoked dispose(), the last one will also close the connection.
Async/await should make it even easier, because await unwraps the Promise result that would otherwise be handled in a then callback.
async function test() {
  const allItems = await collection.find({}).toArray();
  const namesBeginningWithS = await collection.find({ name: /^S/ }).toArray();
  const fiftyFiveYearOlds = await collection.find({ age: 55 }).toArray();
  client.close();
}
test();
Below is an example of how async/await can end up making async code behave sequentially and run inefficiently, by waiting for one async function to complete before invoking the next, when the ideal scenario is to invoke them all immediately and then wait until they have all completed.
let counter = 0;

function doSomethingAsync(id, start) {
  return new Promise(resolve => {
    setTimeout(() => {
      counter++;
      const stop = new Date();
      const runningTime = getSeconds(start, stop);
      resolve(`result${id} completed in ${runningTime} seconds`);
    }, 2000);
  });
}

function getSeconds(start, stop) {
  return (stop - start) / 1000;
}

async function test() {
  console.log('Awaiting 3 Async calls');
  console.log(`Counter before execution: ${counter}`);
  const start = new Date();
  let callStart = new Date();
  const result1 = await doSomethingAsync(1, callStart);
  callStart = new Date();
  const result2 = await doSomethingAsync(2, callStart);
  callStart = new Date();
  const result3 = await doSomethingAsync(3, callStart);
  const stop = new Date();
  console.log(result1, result2, result3);
  console.log(`Counter after all ran: ${counter}`);
  console.log(`Total time to run: ${getSeconds(start, stop)}`);
}
test();
Note: awaiting like in the example above makes the calls sequential again. If each takes 2 seconds to run, the function will take 6 seconds to complete.
Combining the best of both worlds, you would want to use async/await while starting all calls immediately. Fortunately, Promise has a method to do this, so test() can be written like this:
async function test() {
  let [allItems, namesBeginningWithS, fiftyFiveYearOlds] = await Promise.all([
    collection.find({}).toArray(),
    collection.find({ name: /^S/ }).toArray(),
    collection.find({ age: 55 }).toArray()
  ]);
  client.close();
}
Here's a working example to demonstrate the difference in performance:
let counter = 0;

function doSomethingAsync(id, start) {
  return new Promise(resolve => {
    setTimeout(() => {
      counter++;
      const stop = new Date();
      const runningTime = getSeconds(start, stop);
      resolve(`result${id} completed in ${runningTime} seconds`);
    }, 2000);
  });
}

function getSeconds(start, stop) {
  return (stop - start) / 1000;
}

async function test() {
  console.log('Awaiting 3 Async calls');
  console.log(`Counter before execution: ${counter}`);
  const start = new Date();
  const [result1, result2, result3] = await Promise.all([
    doSomethingAsync(1, new Date()),
    doSomethingAsync(2, new Date()),
    doSomethingAsync(3, new Date())
  ]);
  const stop = new Date();
  console.log(result1, result2, result3);
  console.log(`Counter after all ran: ${counter}`);
  console.log(`Total time to run: ${getSeconds(start, stop)}`);
}
test();
Other people have touched on this, but I just want to highlight that .toArray() executes asynchronously, so you need to make sure it has finished before closing the session.
This won't work:
const randomUser = await db.collection('user').aggregate([ { $sample: { size: 1 } } ]);
console.log(randomUser.toArray());
await client.close();
This will:
const randomUser = await db.collection('user').aggregate([ { $sample: { size: 1 } } ]).toArray();
console.log(randomUser);
await client.close();
Alternatively, keep the callback style but move client.close() inside the toArray callback, so the connection is only closed after the query has completed:
client.connect(err => {
  const collection = client
    .db("sample_airbnb")
    .collection("listingsAndReviews");
  const test = collection.find({}).toArray((err, result) => {
    if (err) throw err;
    client.close();
  });
});

Export a dynamic variable

I'm trying to export a variable in node.js like this:
let news = [];

const fetchNews = new Promise((resolve, reject) => {
  let query = 'SELECT id, name FROM news';
  mysql.query(query, [], (error, results) => {
    if (error)
      reject({error: `DB Error: ${error.code} (${error.sqlState})`});
    results = JSON.parse(JSON.stringify(results));
    news = results;
    resolve(results);
  });
});

if (!news.length)
  fetchNews
    .then(results => {news = results})
    .catch(err => {console.log('Unable to fetch news', err)});

exports.news = news;
When I use this code in some other module like this:
const news = require('./news.js').news;
console.log(news);
//returns [];
Can somebody point out my mistake in the first snippet?
There are a couple of things that seem odd in the way you are doing this:
You have an async operation, but you export the value without waiting for the operation to complete; exports.news = news captures the initial empty array, and reassigning news later does not update the export. Try something like this:
module.exports = new Promise((resolve, reject) => {
  mysql.query('SELECT id, name FROM news', (error, results) => {
    if (error)
      return reject({error: `DB Error: ${error.code} (${error.sqlState})`});
    resolve(JSON.parse(JSON.stringify(results)));
  });
});
Then to get the news:
var getNewsAsync = require('./news')
getNewsAsync.then(news => console.log(news))
It would be cleaner/shorter if you actually utilize async/await with the mysql lib.
Update:
With Node 8 and above you should be able to promisify the MySQL lib's methods, although there might be better npm options out there for this. Here is an untested version:
const mysql = require('mysql');
const util = require('util');

const conn = mysql.createConnection({/* your HOST/USER/PW/DB */});
const query = util.promisify(conn.query).bind(conn);

module.exports = async () => {
  try {
    return await query('SELECT id, name FROM news');
  } finally {
    conn.end();
  }
};
To get the news:
var getNewsAsync = require('./news')
console.log(await getNewsAsync())
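Note that await is only valid inside an async function, so in a plain script you would wrap the call, e.g. in an async IIFE:

const getNewsAsync = require('./news');

(async () => {
  console.log(await getNewsAsync());
})();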

Likely, easy promises error with ES6

I'm going nuts with this code. I've tried everything I can think of, and I know it's promise-related... but I can't get it working!
The original code is not as simple as the one I'm sharing, but this is the core of the problem:
Take two filled arrays and two empty arrays.
Then crearMazo must loop over one of the filled arrays, search my MongoDB (Mongoose) for those strings, and push each resulting _id onto one of the empty arrays.
Well, it doesn't work. The last console.log shows an empty array, even though the console.log inside the loop does print the array.
I know... I'm doing the promises wrong (obviously)... but I can't find where :(
var cartas = ['Lorem', 'Lorem2', 'Lorem3', 'Lorem4', 'Lorem5', 'Lorem6', 'Lorem7', 'Lorem8'];
var cartas2 = ['Lorem', '2Lorem', '3Lorem', '4Lorem', '5Lorem', '6Lorem', '7Lorem', '8Lorem'];
var newMazo = [];
var newMazo2 = [];

let crearMazo = function (c, m) {
  return new Promise((resolve, reject) => {
    setTimeout(() => {
      for (var i in c) {
        Card.findOne({'nombre': c[i]}, '_id').then(carta => {
          m.push(carta._id);
        });
      }
      resolve(m);
    }, 0);
  });
};

crearMazo(cartas, newMazo)
  .then(crearMazo(cartas2, newMazo2))
  .then(() => {
    console.log('mazo: ' + newMazo);
    console.log('mazo: ' + newMazo2);
  });
You must pass a callback as the argument to then, not a promise. You could use

crearMazo(cartas, newMazo)
  .then(() => crearMazo(cartas2, newMazo2))
  .then(() => {
    console.log('mazo: ' + newMazo);
    console.log('mazo: ' + newMazo2);
  });
but the proper solution would be to run them in parallel, and use their respective results:
Promise.all([
  crearMazo(cartas, []),
  crearMazo(cartas2, [])
]).then(([newMazo, newMazo2]) => {
  console.log('mazo1: ' + newMazo);
  console.log('mazo2: ' + newMazo2);
});
Also, you are starting asynchronous actions in a loop and creating multiple promises for them, without awaiting any of them. You'll want:
function crearMazo(c, m) {
  return new Promise((resolve) => setTimeout(resolve, 0)) // is that actually needed?
    .then(() => {
      var promises = c.map(n =>
        Card.findOne({'nombre': n}, '_id').then(carta => carta._id)
      );
      return Promise.all(promises);
      //     ^^^^^^^^^^^
    }).then(res =>
      m.concat(res)
    );
}
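Since concat returns a new array rather than mutating m, make sure to use the resolved values, e.g. combined with the Promise.all approach above:

Promise.all([
  crearMazo(cartas, []),
  crearMazo(cartas2, [])
]).then(([mazo1, mazo2]) => {
  console.log('mazo1: ' + mazo1);
  console.log('mazo2: ' + mazo2);
});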
