Why can't I rename this object in my node application? - node.js

I've got a node.js/express application communicating with a Postgres database using the node-postgres module. It's working with async/await, but if I rename the object being returned to anything other than { rows }, it comes back undefined. Here is my code; note the comment right under the async, and above const { rows } = ...:
var express = require('express');
var router = express.Router();
const { Pool } = require('pg');

const pool = new Pool({
  connectionString: 'postgresql://postgres@localhost/mydb'
});

router.post('/login', function(req, res, next) {
  var username = req.body.username;
  var password = req.body.password;
  (async () => {
    // if I rename this from "rows" to, say, "userdetails", it comes back undefined
    const { rows } = await pool.query(`
      SELECT id, password
      FROM myschema.users
      WHERE username = $1
      LIMIT 1;`, [username]);
    if (rows.length) {
      // check password, etc, etc.
      return res.status(200).send();
    } else {
      return res.status(401).send();
    }
  })().catch(e => setImmediate(() => {
    res.status(500).send();
  }));
});

module.exports = router;
I'm sure I'm missing something pretty basic here, but why can't I rename this? Does the pg module dictate somehow that the returned var/const must be named { rows }? If so, how might I discover that? I set a breakpoint at the await and step through the code, but it's still unclear to me.

When you do const { rows } = xxx, you are using object destructuring to assign the xxx.rows property to a new variable in the local scope that has the same name as the property.
Thus when you change the name of that property to something else:
const { myVar } = xxx;
The code is now looking for the property xxx.myVar which likely does not exist and thus ends up undefined.
If you want to use a different name, then you have to either use a different form of object destructuring assignment (that includes a new name) or don't use destructuring at all and just assign the .rows property to a new variable.
For example, you could do this instead:
const result = await pool.query(`
  SELECT id, password
  FROM myschema.users
  WHERE username = $1
  LIMIT 1;`, [username]);
const myVar = result.rows;
Or you could designate a new name in the object destructuring assignment:
const { rows: myVar } = await pool.query(`
  SELECT id, password
  FROM myschema.users
  WHERE username = $1
  LIMIT 1;`, [username]);
console.log(myVar);

This is called object destructuring. When you do something like this
const { row } = someObject
then the variable row is equal to the row property of someObject. But if someObject does not have a row property, then row will obviously be undefined.
The same is happening in your case. The object returned by await pool.query(...) has a rows property, which is why const { rows } works, but it does not have a userdetails property, hence undefined.
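To see this concretely, here is a standalone sketch with a plain object shaped like a pg result (no database needed; the names are just for illustration):
const result = { rows: [{ id: 1, password: 'x' }] };
const { rows } = result;          // picks result.rows -> the array
const { userdetails } = result;   // picks result.userdetails -> undefined
const { rows: renamed } = result; // rename while destructuring: renamed === result.rows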

Related

"cannot insert multiple commands into a prepared statement" How do I return cursor results from a PostgreSQL Function in KOA service

I am using a PostgreSQL FUNCTION to return result sets. I am having trouble getting my Node.js/KOA API to call it properly with parameters. I can get it to work without parameters, but the parameters turn it into a prepared statement, and prepared statements don't allow multiple commands.
Here is the database object and configuration that I use:
const { Pool } = require('pg');
const config = require('../configuration');

exports.pool = this.pool;

exports.start = async function() {
  const host = config.get("PGHOST");
  const user = config.get("PGUSER");
  const port = config.get("PGPORT");
  const password = config.get("PGPASSWORD");
  const database = config.get("PGDATABASE");
  this.pool = new Pool({ user, host, database, password, port });
  console.log('Postgres database pool started.');
};

exports.close = async function() {
  await this.pool.end();
};

exports.query = async function(query, data) {
  let rs = await this.pool.query(query, data);
  return rs;
};
Here is my KOA service (it uses the pg.Pool node module, and this is where my issue is, I think):
let getFilteredDevelopers = async (developerId, firstName, lastName) => {
  let query = {
    text: `
      BEGIN;
      SELECT ks_get_filtered_developers($1, $2, $3);
      FETCH ALL IN "ks_developers_cursor";
      COMMIT;
    `,
    values: [ developerId, firstName, lastName ]
  };
  try {
    let result = await database.query(query);
    return result[2].rows;
  } catch (error) {
    return `Failed to fetch developers.`;
  }
};
This approach works fine without the parameters. But when I add the parameters to the KOA pg call, it throws the error: 'cannot insert multiple commands into a prepared statement'.
The following is my function:
CREATE OR REPLACE FUNCTION ks_get_filtered_developers (
  p_developer_id NUMERIC,
  p_first_name TEXT,
  p_last_name TEXT
) RETURNS refcursor AS
$$
DECLARE
  v_query TEXT = '';
  v_where_clause TEXT = '';
  v_developers_cursor refcursor = 'ks_developers_cursor';
BEGIN
  IF (p_developer_id IS NOT NULL) THEN
    v_where_clause = v_where_clause || FORMAT(' AND d.developer_id = %s ', p_developer_id);
  END IF;
  v_query = '
    SELECT d.developer_id AS id, d.*
    FROM ks_developers d
    WHERE 1=1
    ' || v_where_clause || '
    ORDER BY d.developer_id
  ';
  OPEN v_developers_cursor FOR
    EXECUTE v_query;
  RETURN v_developers_cursor;
END;
$$
LANGUAGE plpgsql;
How can I implement this in the appropriate way? What am I missing or misunderstanding?
The problem was my lack of understanding of how to use the pg node module. Basically, I was trying to jam everything into one query, because I thought I had to. I forgot that the "helper" query I created in the database module was narrow, and I got it confused with the pg pool's query function. I was shooting myself in the foot.
This is the article that showed me how multiple statements should be done with a transaction:
https://node-postgres.com/features/transactions
So for a quick fix on this question, I exposed the pool from the database module, queried via a client checked out from the pool, and then released the connection. This allows for multiple prepared statements and actions.
Here is what I modified my KOA service to be:
let getFilteredDevelopers = async (developerId, firstName, lastName) => {
  const client = await database.pool.connect();
  try {
    await client.query('BEGIN');
    const selectQuery = `SELECT ks_get_filtered_developers($1, $2, $3)`;
    await client.query(selectQuery, [ developerId, firstName, lastName ]);
    const fetchQuery = `FETCH ALL IN "ks_developers_cursor"`;
    const result = await client.query(fetchQuery);
    await client.query('COMMIT');
    return result.rows;
  } catch (error) {
    await client.query('ROLLBACK');
    return `Failed to fetch developers.`;
  } finally {
    client.release();
  }
};
I will be refactoring my code to handle this better, but I wanted to answer the question of why this code doesn't work, and what I was misunderstanding.

Firestore cloud function to update another document

I want to increment the value of the field "votes" in a document (item_id) in the collection items. I want a cloud function to do this for me every time a new document is added to the collection votes. The new document contains the item_id. Does anyone know how I can do this? This is what I have now:
import * as functions from "firebase-functions";
import * as admin from "firebase-admin";

admin.initializeApp();

export const vote = functions.firestore.document("/Votes/{vote}")
  .onCreate((snapshot, context) => {
    const item = context.params.item_id;
    const itemdoc = admin.firestore().collection("items").doc(item);
    itemdoc.get().then((doc) => {
      if (doc.exists) {
        itemdoc.update({
          "votes": admin.firestore.FieldValue.increment(1)})
          .catch((err) => {
            console.log("Error updating item vote", err);
          });
      }
    });
  });
The Firebase console logs that the path must be a non-empty string. Does anyone know what I'm doing wrong? The path should not be empty.
The following should do the trick:
export const vote = functions.firestore.document("/Votes/{vote}")
  .onCreate((snapshot, context) => {
    const item = snapshot.data().item_id;
    const itemDocRef = admin.firestore().collection("items").doc(item);
    return itemDocRef.update({
      "votes": admin.firestore.FieldValue.increment(1)
    });
  });
You need to use the data() method on snapshot, in order to get the JavaScript representation of the new document. Then you take the item_id property.
Another possibility is to use the get() method, as follows:
const item = snapshot.get("item_id");
I would suggest renaming the itemdoc variable to itemDocRef, since it is a DocumentReference.
Update following your comments:
If you want to read the item doc after having updated it, you should do as follows:
export const vote = functions.firestore.document("/Votes/{vote}")
  .onCreate(async (snapshot, context) => {
    const item = snapshot.data().item_id;
    const itemDocRef = admin.firestore().collection("items").doc(item);
    await itemDocRef.update({"votes": admin.firestore.FieldValue.increment(1)});
    const itemDocSnapshot = await itemDocRef.get();
    // Do whatever you want with the snapshot
    console.log(itemDocSnapshot.get("user_id"));
    // For example, update another doc
    const anotherDocRef = admin.firestore().collection("....").doc("....");
    await anotherDocRef.update({"user_id": itemDocSnapshot.get("user_id")});
    return null;
  });
Note the use of the async and await keywords.
const item = context.params.item_id;
By accessing context.params, you are trying to read a value from the wildcards present in .document("/Votes/{vote}"), and item_id is not one of them, so it is undefined for sure. To read a field from the document, try this:
const {item_id} = snapshot.data()
// Getting item_id using object destructuring
if (!item_id) {
  // item_id is missing in document
  return null
}
const itemdoc = admin.firestore().collection("items").doc(item_id);
// Pass item_id in doc ^^^^^^^
You can read more about onCreate in the documentation. The first parameter snapshot is the QueryDocumentSnapshot which contains your doc data and the second parameter context is EventContext.
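As a minimal sketch (same trigger path as the question) of what each argument carries:
export const vote = functions.firestore.document("/Votes/{vote}")
  .onCreate((snapshot, context) => {
    console.log(snapshot.id);          // the id of the newly created vote document
    console.log(snapshot.data());      // its fields, e.g. { item_id: "..." }
    console.log(context.params.vote);  // the value of the {vote} wildcard in the path
    return null;
  });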

Pass variable into collection(collectionName).doc(docName)

I am creating a cloud firestore function. The last step I need for now is using the userId to retrieve a document.
Here I get the userId
const userId = snap.data().userId; <<< THIS WORKS
console.log('A new transaction has been added');
Here I want to insert the value from userId to retrieve the correct document.
const deviceDoc = db.collection('device').doc(userId); <<< THIS IS THE PROBLEM
const deviceData = await deviceDoc.get();
const deviceToken = deviceData.data().token;
I don't know how to use the variable, userId, to insert the value into the .doc(userId) to get the data.
If userId = 12345 I want the line to look like this:
const deviceDoc = db.collection('device').doc('12345');
I have tried .doc('userId'), .doc('${userId}'), as well as other things. None of these work.
How do I do this?
As Puf has responded, you can simply use doc(userId). The rest of your code looks fine, so maybe the document you are getting doesn't exist. Try the following:
const deviceRef = db.collection('device').doc(userId);
// you can shorten this to: const deviceRef = db.doc(`device/${userId}`);
try {
  const deviceDoc = await deviceRef.get();
  if (!deviceDoc.exists) {
    console.log(`The document for user ${userId} does not exist`);
  } else {
    const {token} = deviceDoc.data();
    console.log('The token is:', token);
  }
} catch (err) {
  console.error(err);
}

ES6 Async/Await, ExpressJS and Postgres transactions

REVISED QUESTION
I've revised the question, in the hope of getting a clearer answer.
I'm trying to process data in ExpressJS, based on the incoming req.body and the existing data in the table.
I'm receiving a req.body that contains a JSON list of updated fields. Some of those fields are stored as JSONB in Postgres. If an incoming field is JSONB, then the form (external code) that is making the request has already run a jsonpatch.compare() to generate the list of patches, and it is these patches and not the full values that are being passed in. For any non-JSONB values, incoming values just need to be passed through to the UPDATE query.
I have a working version, as below, that pretends that the existing JSONB values in the table ARE NULL. Clearly, this is NOT what is needed. I need to pull the values from the db. The non-querying-of-current-values version, with a bare minimum router, looks like this:
const express = require('express')
const bodyParser = require('body-parser')
const SQL = require('sql-template-strings')
const { Client } = require('pg')
const dbConfig = require('../db')
const jsonpatch = require('fast-json-patch')
const FormRouter = express.Router()
I have some update code:
const patchFormsRoute = (req, res) => {
  const client = new Client(dbConfig)
  const { id } = req.body
  const parts = []
  const params = [id]
  // list of JSONB fields for the 'forms' table
  const jsonFields = [
    'sections',
    'editors',
    'descriptions',
  ]
  // list of all fields, including JSONB fields in the 'forms' table
  const possibleFields = [
    'status',
    'version',
    'detail',
    'materials',
    ...jsonFields,
  ]
  // this is a DUMMY RECORD instead of the result of a client.query
  let currentRecord = { 'sections': [], 'editors': [], 'descriptions': [] }
  possibleFields.forEach(myProp => {
    if (req.body[myProp] != undefined) {
      parts.push(`${myProp} = $${params.length + 1}`)
      if (jsonFields.indexOf(myProp) > -1) {
        const val = currentRecord[myProp]
        jsonpatch.applyPatch(val, req.body[myProp])
        params.push(JSON.stringify(val))
      } else {
        params.push(req.body[myProp])
      }
    }
  })
  const updateQuery = 'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
  client.connect()
  return client
    .query(updateQuery, params)
    .then(result => res.status(200).json(result.rowCount))
    .catch(err => res.status(400).json(err.severity))
    .then(() => client.end())
}

FormRouter.route('/')
  .patch(bodyParser.json({ limit: '50mb' }), patchFormsRoute)

exports.FormRouter = FormRouter
I promise that this is working code, which does almost what I need. However, I want to replace the dummy record with the data already in the table, fetched contemporaneously. My issue is that because multiple clients could be updating a row at the same time (but looking at orthogonal elements of the JSONB values), I need the fetch, calc, and update to happen as a SINGLE TRANSACTION. My plan is to:
BEGIN a transaction
Query Postgres for the current row value, based on the incoming id
For any JSONB fields, apply the patch to generate the correct value for that field in the UPDATE statement.
Run the UPDATE statement with the appropriate param values (either from the req.body or the patched row, depending on whether the field is JSONB or not)
COMMIT the transaction, or ROLLBACK on error.
I've tried implementing the answer from @midrizi; maybe it's just me, but the combination of awaits and plain testing of res sends the server off into Hyperspace... and ends in a timeout.
In case anyone is still awake, here's a working solution to my issue.
TLDR; RTFM: A pooled client with async/await minus the pooling (for now).
const patchFormsRoute = (req, res) => {
  const { id } = req.body
  // list of JSONB fields for the 'forms' table
  const jsonFields = [
    'sections',
    'editors',
    'descriptions',
  ]
  // list of all fields, including JSONB fields in the 'forms' table
  const possibleFields = [
    'status',
    'version',
    'detail',
    'materials',
    ...jsonFields,
  ]
  const parts = []
  const params = [id]
  ;(async () => {
    const client = new Client(dbConfig)
    await client.connect()
    try {
      // begin a transaction
      await client.query('BEGIN')
      // get the current form data from DB
      const fetchResult = await client.query(
        SQL`SELECT * FROM forms WHERE id = ${id}`,
      )
      if (fetchResult.rowCount === 0) {
        res.status(400).json(0)
        await client.query('ROLLBACK')
      } else {
        const currentRecord = fetchResult.rows[0]
        // patch JSONB values or update non-JSONB values
        let val = []
        possibleFields.forEach(myProp => {
          if (req.body[myProp] != undefined) {
            parts.push(`${myProp} = $${params.length + 1}`)
            if (jsonFields.indexOf(myProp) > -1) {
              val = currentRecord[myProp]
              jsonpatch.applyPatch(val, req.body[myProp])
              params.push(JSON.stringify(val))
            } else {
              params.push(req.body[myProp])
            }
          }
        })
        const updateQuery =
          'UPDATE forms SET ' + parts.join(', ') + ' WHERE id = $1'
        // update record in DB
        const result = await client.query(updateQuery, params)
        // commit transaction
        await client.query('COMMIT')
        res.status(200).json(result.rowCount)
      }
    } catch (err) {
      await client.query('ROLLBACK')
      res.status(400).json(err.severity)
      throw err
    } finally {
      client.end()
    }
  })().catch(err => console.error(err.stack))
}
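For when the pooling comes back: below is a hedged sketch (runInTransaction is a hypothetical helper name, not part of any API) of the same BEGIN/COMMIT/ROLLBACK shape built on pg's Pool, along the lines of the node-postgres transactions page linked above; it assumes a single shared pool created from the same dbConfig.
const { Pool } = require('pg')
const pool = new Pool(dbConfig) // created once at module load

const runInTransaction = async (work) => {
  const client = await pool.connect() // check a client out of the pool
  try {
    await client.query('BEGIN')
    const result = await work(client) // caller runs its queries on this client
    await client.query('COMMIT')
    return result
  } catch (err) {
    await client.query('ROLLBACK')
    throw err
  } finally {
    client.release() // return the client to the pool instead of ending it
  }
}
The body of the route above would then run as runInTransaction(async client => { /* the SELECT, patch, and UPDATE steps */ }).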

Cloud Functions for Firestore: accessing parent collection data

Many blogs suggest switching to Cloud Firestore because it's easy and well secured. Coming from the Realtime Database, back when using Functions + RTDB it was easy to navigate through document triggers, with things like ref.parent.
My setup is like this:
Users
  {userid}
    last_seen: "data"
    {forms}
      {formid}
However, I have added a document trigger with onCreate, and I want to get the value of last_seen:
exports.updateUser = functions.firestore.document('users/{userId}/forms/{formid}')
  .onCreate((snap, context) => {
    const newValue = snap.data();
    console.log("test value : " + newValue.test); // works
    console.log("form id: " + context.params.formid); // works
    console.log("user last seen : " + newValue.last_seen); // doesn't work, can't access the parent collection data
  });
I totally get the confusion with the switch to Firestore, but it's almost the exact same way in this case.
In the Realtime Database, you have the snapshot:
exports.doStuff = functions.database.ref('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const ref = snapshot.ref;
    const userRef = ref.parent.parent;
    userRef.once('value').then(parentSnap => {
      const user = parentSnap.val();
      const lastSeen = user.last_seen;
    });
  });
In Firestore:
exports.doStuff = functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const ref = snapshot.ref;
    const userRef = ref.parent.parent;
    userRef.get().then(parentSnap => {
      const user = parentSnap.data();
      const lastSeen = user.last_seen;
    });
  });
Another thing to consider is that you are passing the userId in your params, so you could just build your own DocumentReference (assuming you're also using firebaseAdmin):
functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate((snapshot, context) => {
    const userId = context.params.userId;
    const userRef = firebaseAdmin.firestore().collection('users').doc(userId);
    userRef.get().then(parentSnap => {
      const user = parentSnap.data();
      const lastSeen = user.last_seen;
    });
  });
It also allows you to decouple logic you may use often; consider it a "helper" method (NOTE: I switched to async/await by accident, but it's a bit cleaner):
functions.firestore.document('/users/{userId}/forms/{formId}')
  .onCreate(async (snapshot, context) => {
    const userId = context.params.userId;
    const lastSeen = await getLastSeen(userId);
  });

// == Helper Functions ==-------------------
export async function getLastSeen(userId) {
  if (!userId) return Promise.reject('no userId');
  // User Ref
  const userSnap = await firebaseAdmin.firestore().collection('users').doc(userId).get();
  return userSnap.data().last_seen;
}
Now you can use getLastSeen() whenever you need it, and if you make a change you only have to adjust that one function. If it's not something you call often, then don't worry about it, but I would consider maybe a getUser() helper, as sketched below...
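A hedged sketch of what that getUser() helper might look like, following the same pattern as getLastSeen() above (getUser is hypothetical, and firebaseAdmin is assumed to be initialized):
export async function getUser(userId) {
  if (!userId) return Promise.reject('no userId');
  // Fetch the whole user document rather than a single field
  const userSnap = await firebaseAdmin.firestore().collection('users').doc(userId).get();
  if (!userSnap.exists) return Promise.reject(`no user ${userId}`);
  return userSnap.data(); // e.g. { last_seen: ..., ... }
}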
In your code, snap is a DocumentSnapshot type object. As you can see from the linked API documentation, there is a ref property on that object that gets you a DocumentReference object pointing to the document that was added. That object has a parent property that gives you a CollectionReference pointing to the collection where the document exists, which in turn has its own parent property. So, use these properties to navigate around your database as needed.
Get the reference where the change took place, move two levels up, and capture the data using get():
exports.updateUser = functions.firestore.document('users/{userId}/forms/{formid}')
  .onCreate(async (snap, context) => {
    // Get the reference where the change took place
    const changeRef = snap.ref;
    // Move to grandad level (2 levels up)
    const userIdRef = changeRef.parent.parent;
    // Capture data
    const snapshot = await userIdRef.get();
    // Get variable
    const lastSeen = snapshot.data().last_seen;
    // Do your stuff...
    return null;
  });
