Interdependent Transactions with pg-promise - node.js

I am trying to build an app that involves posts and tags for posts. For these I have posts, tags and post_tag tables. tags holds the tags I have defined beforehand, which are suggested to the user somewhere on the front-end. post_tag holds the post and tag ids as pairs, one pair per row.
I use Express.js, PostgreSQL and pg-promise.
As far as I know, I need a transaction for the create-post operation.
I also need a mechanism to detect whether a tag was not yet in the tags table when the user created the post, so that I can insert it on the fly and have a tag_id for every tag before inserting the post_id/tag_id pairs into post_tag. Otherwise I will get a foreign key error, since post_tag's post_id and tag_id columns reference the id columns of posts and tags, respectively.
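For reference, a minimal sketch of the schema this implies (column types, and any columns beyond the ones named above, are assumptions):
-- sketch only; the real tables have more columns
CREATE TABLE posts (
  id        serial PRIMARY KEY,
  title     text,
  text      text,
  post_url  text,
  author_id integer,
  post_type text
);

CREATE TABLE tags (
  id  serial PRIMARY KEY,
  tag text NOT NULL
);

CREATE TABLE post_tag (
  post_id integer NOT NULL REFERENCES posts (id),
  tag_id  integer NOT NULL REFERENCES tags (id),
  PRIMARY KEY (post_id, tag_id)
);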
Here is the route handler I have been using for this, so far unsuccessfully:
privateAPIRoutes.post('/ask', function (req, res) {
  console.log('/ask req.body: ', req.body);
  // write to posts
  var post_id = ''
  var post_url = ''
  db.query(
    `
    INSERT INTO
      posts (title, text, post_url, author_id, post_type)
    VALUES
      ($(title), $(text), $(post_url), $(author_id), $(post_type))
    RETURNING id
    `,
    {
      title: req.body.title,
      text: req.body.text,
      post_url: slug(req.body.title),
      author_id: req.user.id,
      post_type: 'question'
    } // remember req.user contains decoded jwt saved by mw above.
  )
  .then(post => {
    console.log('/ask post: ', post);
    post_id = post.id
    post_url = post.post_url
    // if tag does not exist create it here
    var tags = req.body.tags;
    console.log('2nd block tags1', tags);
    for (var i = 0; i < tags.length; i++) {
      if (tags[i].id == undefined) {
        console.log('req.body.tags[i].id == undefined', tags[i].id);
        var q1 = db.query("insert into tags (tag) values ($(tag)) returning id", {tag: tags[i].label})
          .then(data => {
            console.log('2nd block tags2', tags);
            tags[i].id = data[0].id
            // write to the post_tag
            db.tx(t => {
              var queries = [];
              for (var j = 0; j < tags.length; j++) {
                var query = t.query(
                  `
                  INSERT INTO
                    post_tag (post_id, tag_id)
                  VALUES
                    ($(post_id), $(tag_id))
                  `,
                  {
                    post_id: post_id,
                    tag_id: tags[j].id
                  }
                )
                queries.push(query);
              }
              return t.batch(queries)
            })
            .then(data => {
              res.json({post_id: post_id, post_url: post_url})
            })
            .catch(error => {
              console.error(error);
            })
          })
          .catch(error => {
            console.error(error);
          });
      }
    }
  })
  .catch(error => {
    console.error(error);
  })
});

The main problem you have is that you can't use the root-level db object inside a task or transaction. Trying to create a new connection while inside a transaction breaks the transaction logic. You would need to use t.tx in such cases; however, in your case I don't see that you need it at all.
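For completeness, a rough sketch of what a nested transaction looks like via t.tx (not something your case requires):
db.tx(t => {
  // queries inside the outer transaction go through t, never db
  return t.one('SELECT 1 AS value')
    .then(() => t.tx(t2 => {
      // the nested transaction (implemented via a SAVEPOINT) gets its own context t2
      return t2.none('SELECT 2');
    }));
});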
corrected code:
privateAPIRoutes.post('/ask', (req, res) => {
  console.log('/ask req.body: ', req.body);
  db.tx(t => {
    return t.one(
      `
      INSERT INTO
        posts (title, text, post_url, author_id, post_type)
      VALUES
        ($(title), $(text), $(post_url), $(author_id), $(post_type))
      RETURNING *
      `,
      {
        title: req.body.title,
        text: req.body.text,
        post_url: slug(req.body.title),
        author_id: req.user.id,
        post_type: 'question'
      } // remember req.user contains decoded jwt saved by mw above.
    )
    .then(post => {
      console.log('/ask second query: post[0]: ', post);
      console.log('/ask second query: tags: ', req.body.tags);
      console.log('/ask second query: tags[0]: ', req.body.tags[0]);
      // the key piece to the answer:
      var tagIds = req.body.tags.map(tag => {
        return tag.id || t.one("insert into tags(tag) values($1) returning id", tag.label, a => a.id);
      });
      return t.batch(tagIds)
        .then(ids => {
          var queries = ids.map(id => {
            return t.one(
              `
              INSERT INTO post_tag (post_id, tag_id)
              VALUES ($(post_id), $(tag_id))
              RETURNING post_id, tag_id
              `,
              {
                post_id: post.id,
                tag_id: id
              }
            )
          });
          return t.batch(queries);
        });
    });
  })
  .then(data => {
    // data = result from the last query;
    console.log('/api/ask', data);
    res.json(data);
  })
  .catch(error => {
    // error
  });
});
The key here is simply to iterate through the tag ids and, for the ones that are not set, use an insert. Then you settle them all by passing the array into t.batch.
Other recommendations:
You should use method one when executing an insert that returns the new record's columns.
You should use .catch only once there, on the transaction. This is about how to use promises in general, not just this library.
You can place your queries into external SQL files; see Query Files.
To understand conditional inserts better, see SELECT->INSERT and the sketch below.
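As a rough sketch of that pattern applied to the tags table here, a single query that returns the existing id or inserts a new row (the helper name getTagId is made up; without a unique constraint on tags(tag) it is not safe against concurrent inserts):
function getTagId(t, label) {
  return t.one(
    `WITH existing AS (
       SELECT id FROM tags WHERE tag = $(tag)
     ), inserted AS (
       INSERT INTO tags (tag)
       SELECT $(tag)
       WHERE NOT EXISTS (SELECT 1 FROM existing)
       RETURNING id
     )
     SELECT id FROM inserted
     UNION ALL
     SELECT id FROM existing`,
    {tag: label},
    row => row.id
  );
}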

Related

Node JS mongoose insert data at the beginning

I'm experimenting with MongoDB using Mongoose in Node.js.
I have a simple web app where a user can create a post.
On creation this post is saved to MongoDB.
On the web page, I have a scroll event listener which checks whether the user is at the bottom of the page. If so, it fetches more posts from the backend.
I want these to be retrieved from the db from newest to oldest, but the model.save() method of Mongoose always inserts a new document at the end of the collection.
So when a new post is created, the backend currently does this:
const post = new Post({
  text: req.body.text,
  author: {
    userid: user._id,
    name: user.username,
    picPath: user.picPath
  },
  images: images
});
post.save(function (err) {
  let status = true;
  let message = "Post mentve." // Hungarian for "Post saved."
  if (err) { status = false; message = err; console.log(err) }
  return res.send({ status: status, msg: message });
})
This way, a new post is pushed onto the end of the collection, not unshifted onto the front.
When the client wants new posts the backend does this:
app.get('/dynamicPostLoad/:offset/:limit', async (req, res) => {
  let offset = req.params.offset;
  let limit = req.params.limit;
  let response = {
    status: true,
    posts: [],
    message: "Fetched"
  };
  await Post.find({}).skip(offset).limit(limit).then(products => {
    response.posts = products;
  }).catch((err) => {
    response.status = false;
    response.message = err;
  });
  return res.send(response);
});
So Mongoose will fetch from oldest to newest, since every new post is inserted at the end of the collection.
That way, the user sees the oldest posts first as he scrolls, rather than the newest.
I was thinking of three ways.
Either the Post.find({}) method should crawl the documents from the end of the collection, or the Post.save() method should unshift the document instead of pushing it, or I could find all the posts in the collection and reverse them (the last one would be painfully slow).
EDIT: Every post contains a creation date, so it could be sorted.
How can I achieve this?
I solved it with sort. (I still don't understand why I can't insert a document at the beginning of a collection.)
Here is my solution:
app.get('/dynamicPostLoad/:offset/:limit', async (req, res) => {
  let offset = req.params.offset;
  let limit = req.params.limit;
  let response = {
    status: true,
    posts: [],
    message: "Fetched"
  };
  // Sort every find by created date before limiting.
  await Post.find({}).sort({created: -1}).skip(offset).limit(limit).then(products => {
    response.posts = products;
  }).catch((err) => {
    response.status = false;
    response.message = err;
  });
  return res.send(response);
});
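If the collection grows large, it can also help to index the field you sort on, so the sort({created: -1}) query stays fast; a minimal sketch, assuming the schema is defined roughly like this:
const mongoose = require('mongoose');

// sketch only; the real schema also has text, author, images, etc.
const postSchema = new mongoose.Schema({
  text: String,
  created: { type: Date, default: Date.now }
});
postSchema.index({ created: -1 }); // lets the sorted query be served from an index

const Post = mongoose.model('Post', postSchema);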

Two Knex queries (node.js), one of them in a forEach loop

One table is UserChats, with the chat-to-user mapping.
The other table is ChatList, with all chats.
I need to build an array of a specific user's chats in a variable and return it.
_DB("UserChats").select('*').where({
uID: user_id
}).then(chats => {
const userChats = []
chats.forEach(chat => {
_DB('ChatList').where({
id: chat.chatID
}).select('*').then(chat_data => {
userChats.push(chat_data);
});
});
return userChats; // but this variable is empty
});
How can I elegantly rewrite the code so that all the code is executed?
The join query will be as follows:
_DB('UserChats')
  .select('*')
  .where({ "UserChats.uID": user_id })
  .join('ChatList', 'UserChats.chatID', 'ChatList.id')
  .then(function(rows) {
    return rows;
  });
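For reference, the same join wrapped in an async function so the caller can actually await the rows (getUserChats is just an assumed name; _DB is the knex instance from the question):
async function getUserChats(user_id) {
  const rows = await _DB('UserChats')
    .select('*')
    .where({ 'UserChats.uID': user_id })
    .join('ChatList', 'UserChats.chatID', 'ChatList.id');
  return rows; // one row per chat of this user, with columns from both tables
}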

Only process 500 lines/rows at a time with createReadStream

I have to read a really large CSV file, so I searched Google and found out about createReadStream. I am using a program that reads the CSV file data and inserts it into MongoDB.
The process I am following:
Process the data using createReadStream (I think it reads the file line by line).
Store the data in an array.
Insert the data into MongoDB using insertMany.
Now the problem is that the whole file is first stored in an array, and only then do I insert it into the database.
I think the better approach would be to store only the first 500 rows in an array, insert them into the DB, and then repeat the same steps for the next 500 records.
Is it possible to achieve this?
And is it the right way to do this?
My program:
const test = async () => {
  const stream = fs.createReadStream(workerData)
    .pipe(parse())
    .on('data', async function(csvrow) {
      try {
        stream.pause()
        if (!authorName.includes(csvrow.author)) {
          const author = new Author({author: csvrow.author})
          authorId = author._id
          authorName.push(author.author)
          authorData.push(author)
        }
        if (!companyName.includes(csvrow.company_name)) {
          const company = new Company({companyName: csvrow.company_name})
          companyID = company._id
          companyName.push(company.companyName)
          companyData.push(company)
        }
        users = new User({
          name: csvrow.firstname,
          dob: csvrow.dob,
          address: csvrow.address,
          phone: csvrow.phone,
          state: csvrow.state,
          zip: csvrow.zip,
          email: csvrow.email,
          gender: csvrow.gender,
          userType: csvrow.userType
        })
        userData.push(users)
        book = new Book({
          book_number: csvrow.book_number,
          book_name: csvrow.book_name,
          book_desc: csvrow.book_desc,
          user_id: users._id,
          author_id: authorId
        })
        bookData.push(book)
        relationalData.push({
          username: users.name,
          author_id: authorId,
          book_id: book._id,
          company_id: companyID
        })
      } finally {
        stream.resume()
      }
    })
    .on('end', async function() {
      try {
        Author.insertMany(authorData)
        User.insertMany(userData)
        Book.insertMany(bookData)
        Company.insertMany(companyData)
        await Relational.insertMany(relationalData)
        parentPort.postMessage("true")
      } catch(e) {
        console.log(e)
        parentPort.postMessage("false")
      }
    })
}
test()
This program works fine and inserts the data into the DB, but I am looking for something like this:
const stream = fs.createReadStream(workerData)
  .pipe(parse())
  .on('data', async function(csvrow, maxLineToRead: 500) {
    // whole code/logic of insert data into DB
  })
So maxLineToRead is my imaginary term.
Basically, my point is that I want to process 500 rows at a time, insert them into the DB, and repeat this process until the end.
You can create a higher scoped array variable where you accumulate rows of data as they arrive on the data event. When you get to 500 rows, fire off your database operation to insert them. If not yet at 500 rows, then just add the next one to the array and wait for more data events to come.
Then, in the end event insert any remaining rows still in the higher scoped array.
In this way, you will insert 500 at a time and then however many are left at the end. Compared with inserting everything at the end, this spreads the database load out over the time you spend parsing.
Here's an attempt to implement that type of processing. There are some unknowns (documented with comments) based on an incomplete description of exactly what you're trying to accomplish in some circumstances:
const test = () => {
  return new Promise((resolve, reject) => {
    const accumulatedRows = [];

    async function processRows(rows) {
      // initialize data arrays that we will insert
      const authorData = [],
        companyData = [],
        userData = [],
        bookData = [],
        relationalData = [];

      // this code still has a problem that I don't have enough context
      // to know how to solve
      // If authorName contains csvrow.author, then the variable
      // authorId is not initialized, but is used later in the code
      // This is a problem that needs to be fixed.
      // The same issue occurs for companyID
      for (let csvrow of rows) {
        let authorId, companyID;
        if (!authorName.includes(csvrow.author)) {
          const author = new Author({ author: csvrow.author })
          authorId = author._id
          authorName.push(author.author)
          authorData.push(author)
        }
        if (!companyName.includes(csvrow.company_name)) {
          const company = new Company({ companyName: csvrow.company_name })
          companyID = company._id
          companyName.push(company.companyName)
          companyData.push(company)
        }
        let users = new User({
          name: csvrow.firstname,
          dob: csvrow.dob,
          address: csvrow.address,
          phone: csvrow.phone,
          state: csvrow.state,
          zip: csvrow.zip,
          email: csvrow.email,
          gender: csvrow.gender,
          userType: csvrow.userType
        });
        userData.push(users)
        let book = new Book({
          book_number: csvrow.book_number,
          book_name: csvrow.book_name,
          book_desc: csvrow.book_desc,
          user_id: users._id,
          author_id: authorId
        });
        bookData.push(book)
        relationalData.push({
          username: users.name,
          author_id: authorId,
          book_id: book._id,
          company_id: companyID
        });
      }
      // all local arrays of data are populated now for this batch
      // so add this data to the database
      await Author.insertMany(authorData);
      await User.insertMany(userData);
      await Book.insertMany(bookData);
      await Company.insertMany(companyData);
      await Relational.insertMany(relationalData);
    }

    const batchSize = 50;
    const stream = fs.createReadStream(workerData)
      .pipe(parse())
      .on('data', async function(csvrow) {
        try {
          accumulatedRows.push(csvrow);
          if (accumulatedRows.length >= batchSize) {
            stream.pause();
            await processRows(accumulatedRows);
            // clear out the rows we just processed
            accumulatedRows.length = 0;
            stream.resume();
          }
        } catch (e) {
          // calling destroy(e) will prevent leaking a stream
          // and will trigger the error event to be called with that error
          stream.destroy(e);
        }
      }).on('end', async function() {
        try {
          await processRows(accumulatedRows);
          resolve();
        } catch (e) {
          reject(e);
        }
      }).on('error', (e) => {
        reject(e);
      });
  });
}

test().then(() => {
  parentPort.postMessage("true");
}).catch(err => {
  console.log(err);
  parentPort.postMessage("false");
});
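Stripped of the model-specific code, the core pattern is small; a minimal sketch assuming the csv-parse package and a saveBatch(rows) function that you supply (both importInBatches and saveBatch are placeholder names):
const fs = require('fs');
const { parse } = require('csv-parse');

function importInBatches(path, saveBatch, batchSize = 500) {
  return new Promise((resolve, reject) => {
    const rows = [];
    const stream = fs.createReadStream(path)
      .pipe(parse({ columns: true }))
      .on('data', async (row) => {
        rows.push(row);
        if (rows.length >= batchSize) {
          stream.pause();                    // stop 'data' events while inserting
          try {
            await saveBatch(rows.splice(0)); // insert the batch and empty the buffer
            stream.resume();
          } catch (e) {
            stream.destroy(e);               // surfaces through the 'error' handler below
          }
        }
      })
      .on('end', async () => {
        try {
          if (rows.length) await saveBatch(rows); // whatever is left over
          resolve();
        } catch (e) {
          reject(e);
        }
      })
      .on('error', reject);
  });
}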

Firebase transaction to update multiple fields in a document at once

I am trying to read a single field in a Firestore document, increment it by one, and update it alongside two other fields in the document.
It seems the Firebase transaction update() function accepts an object with only one field and value, because when I add other fields to the object, the update fails.
This works:
t.update(referralCodesDocRef, {field1: value1});
This does not work:
t.update(referralCodesDocRef, {
  field1: value1,
  field2: value2,
  field3: value3
});
Also this does not work:
t.update(referralCodesDocRef, {field1: value1});
t.update(referralCodesDocRef, {field2: value2});
t.update(referralCodesDocRef, {field3: value3});
Here is the function that does the transaction
function runIncreaseCountTransaction(referralCodesDocRef) {
  return db.runTransaction(t => {
    return t.get(referralCodesDocRef)
      .then(doc => {
        console.log(doc);
        let newReferralCount = doc.data().referral_count + 1;
        if (newReferralCount === max_referral_count) {
          const current_time_millis = Date.now();
          const end_time_millis = current_time_millis + (180 * 1000); // ends after 3 mins
          t.update(referralCodesDocRef, {referral_count: newReferralCount});
          t.update(referralCodesDocRef, { timer_start_time: current_time_millis });
          t.update(referralCodesDocRef, { timer_end_time: end_time_millis });
        } else {
          t.update(referralCodesDocRef, { referral_count: newReferralCount });
        }
        return Promise.resolve(newReferralCount);
      })
      .then(result => {
        console.log('Success: Update successful: Referral count incremented!!', result);
        return true;
      }).catch(err => {
        console.log('Error: could not update referral count', err);
      });
  });
}
So how can I achieve multiple fields update with firebase transactions?
None of the answers are correct (at least they didn't work for me).
Kotlin
Here is my implementation. Very easy:
val db = Firebase.firestore
db.collection("Users")
  .document("Ronaldo")
  .update("famous", true,
          "distance", 5)
  .addOnSuccessListener { ...
  .addOnFailureListener { ...
So basically you just add another comma and another field/value pair after your first one.
There should be no problem at all updating a document with a JavaScript object composed of several properties, like:
t.update(referralCodesDocRef, {
  field1: value1,
  field2: value2,
  field3: value3
});
The problem most probably comes from the fact that you don't return the Transaction returned by the Transaction's update() method. The following should do the trick:
function runIncreaseCountTransaction(referralCodesDocRef) {
  return db.runTransaction(t => {
    return t.get(referralCodesDocRef)
      .then(doc => {
        console.log(doc);
        let newReferralCount = doc.data().referral_count + 1;
        if (newReferralCount === max_referral_count) {
          const current_time_millis = Date.now();
          const end_time_millis = current_time_millis + (180 * 1000); // ends after 3 mins
          return t.update(referralCodesDocRef, {
            referral_count: newReferralCount,
            timer_start_time: current_time_millis,
            timer_end_time: end_time_millis
          });
        } else {
          return t.update(referralCodesDocRef, { referral_count: newReferralCount });
        }
      });
  })
    .then(result => {
      console.log('Success: Update successful: Referral count incremented!!', result);
      return null;
    })
    .catch(err => {
      console.log('Error: could not update referral count', err);
      return null; //Note the return here.
    });
}
Kotlin
You can update multiple fields in a Firestore document using the field, value, field, value... format:
db.collection("users").doc("docID").update({
"field1", value1, // field, value
"field1, "value2" // field, value
})
If you want to update a nested field, you have a couple options.
Use the dot notation like you mentioned:
db.collection("users").doc("docID").update({
"field1", value1,
"field2.subfield2", value2,
"field2.subfield3", value3
})
Make your "value" a map:
db.collection("users").doc("docID").update({
"field1", value1,
"field2", mapOf(
"field3" to value3,
"field4" to value4
})

How to use json object with where clause?

What I'm trying to achieve
Find all players who are in the authenticated user's team.
What is the Problem?
I am unable to use the returned JSON within the where clause of const findUsers = await User.findAll, and I am unsure whether this is the correct way.
Database Tables
Users table: id (PK), etc.
Teams: id (PK), etc.
TeamUsers: id, TeamID (foreign key), UserID (foreign key), etc.
The JSON returned from findTeamUsers (the variable ob), which is correct:
[{"id":2,"TeamID":1,"UserID":1,"createdAt":"2019-08-09","updatedAt":"2019-08-09"},{"id":3,"TeamID":1,"UserID":3,"createdAt":"2019-08-09","updatedAt":"2019-08-09"},{"id":76,"TeamID":1,"UserID":5,"createdAt":"2019-08-22","updatedAt":"2019-08-22"}]
Below is the route that I am currently using (Node.js, Express.js):
router.get('/Team', auth, async function(req, res) {
  // -- Get the Users team that is currently Authenticated (req.user.id (auth) )
  const findTeam = await TeamUsers.findOne({
    where: {
      UserID: req.user.id
    }
  });
  // If the User has a team
  if (findTeam) {
    // -- Get the players Team Mates who have the matching TeamID
    const findTeamUsers = await TeamUsers.findAll({
      where: {
        TeamID: findTeam.TeamID
      }
    });
    // Store the object and Display in JSON FORMAT
    var ob = JSON.stringify(findTeamUsers);
    console.log(ob);
    if (!findTeamUsers) {
      console.log('error');
    } else {
      // find the Users Details From the Users Table Model
      // findTeamUsers - Is an array of each record found from const findTeamUsers = await TeamUsers.findAll
      const findUsers = await User.findAll({
        where: {
          id: ob.UserID
        }
      });
      res.status(200).json(findUsers);
    }
  }
});
Your ob is a string, so ob.UserID is undefined. findTeamUsers (the findAll result) is an array of objects, so findTeamUsers.UserID would be undefined too (the array findTeamUsers does not have a UserID property).
You can pass an array of UserIDs to search for multiple records (if you want to find all the UserIDs in the array):
User.findAll({
  where: {
    id: findTeamUsers.map(o => o.UserID)
  }
})
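Passing an array like this is shorthand for an IN query; written out explicitly with a Sequelize operator it looks like the following sketch (assuming Sequelize v5 or later, inside the same async route handler):
const { Op } = require('sequelize');

const findUsers = await User.findAll({
  where: {
    id: { [Op.in]: findTeamUsers.map(o => o.UserID) } // WHERE id IN (...)
  }
});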
