How to convert snapshot.val() to an object in Node.js?

I am trying to write a Firebase Cloud Function. I want to parse the data (here is the data structure) into an object (model/POJO) or a HashMap, as I believe everything in JS is a map. So I have written this code:
exports.checkAllBookings = functions.https.onRequest((request, response) => {
  admin.database().ref(`/bookings`).once('value').then((snapshot) => {
    return snapshot.val()
  }).then((bookingDetails) => {
    var map = new HashMap()
    map = bookingDetails
    console.log(map.size)
    map.forEach(function (value, key) {
      console.log(value)
    })
    // console.log(bookingDetails.keys)
    // bookingDetails.forEach(function(childSnap){
    //   var item = childSnap.val()
    //   console.log("item ::::: " + item)
    // })
    response.send(map)
  })
})
& this is how it all looks: complete pic. I have installed the HashMap dependency from here: Hashmap npm install. But after all this I keep getting an error here. So how can I parse that data into a bean, POJO, or HashMap in Node.js so that I can move on and complete my task? Thanks for any help.

It's hard to determine what you're trying to do. But if you're simply trying to return the data to the caller, this is the simplest approach:
exports.checkAllBookings = functions.https.onRequest((request, response) => {
  admin.database().ref(`/bookings`).once('value').then((snapshot) => {
    response.send(snapshot.val());
  })
})
If, for example, you're trying to filter the data before sending it back to the user, you can use the Snapshot.forEach method:
exports.checkAllBookings = functions.https.onRequest((request, response) => {
  admin.database().ref(`/bookings`).once('value').then((snapshot) => {
    const result = [];
    snapshot.forEach(function (bookingSnapshot) {
      var booking = bookingSnapshot.val();
      if (booking.someProperty === true) {
        result.push(booking);
      }
    });
    response.send(result);
  })
})
The if (booking.someProperty === true) condition here is whatever you want it to be. You could do a similar operation to add more information for each booking.
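Note that snapshot.val() already returns a plain JavaScript object keyed by booking ID, so no HashMap library is needed; plain Object methods cover the map-style operations from the question. A minimal sketch of that approach (the /bookings path comes from the question; the logged fields are placeholders):
exports.checkAllBookings = functions.https.onRequest((request, response) => {
  admin.database().ref(`/bookings`).once('value').then((snapshot) => {
    const bookings = snapshot.val() || {}; // plain object keyed by push ID
    console.log(Object.keys(bookings).length); // equivalent of map.size
    Object.entries(bookings).forEach(([key, booking]) => {
      console.log(key, booking); // equivalent of map.forEach(value, key)
    });
    response.send(bookings);
  });
});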

Related

Unable to return/get response from closure

I want to get the result from the closure function. I tried some callback functions too, but I'm still getting the same issue. I was able to console.log the result inside the closure/callback, but I can't return the response into a variable. I tried some of the solutions posted already but wasn't able to succeed.
Here's my code:
var fcmTokens = [input.fcmToken];
switch (topicType) {
  case 'post':
    const username = (input.username).toLowerCase();
    const userPrefix = Config.get(`setting.topic.user.prefix`);
    fcmTokens = Topic.get(`${userPrefix}-${username}`, { AttributesToGet: ['fcmToken'] }, function (err, foundTopic) {
      var result = foundTopic.attrs.fcmToken;
      console.log(result); // Able to log my expected result
      return result;
    });
    console.log(fcmTokens); // undefined
    break;
}
I was able to resolve my own issue by installing a library called vogels-promisified, since my model uses DynamoDB via Vogels.
My code went from the version above to this:
let userTopic = await Topic
  .getAsync(`${userPrefix}-${username}`, { ConsistentRead: true, AttributesToGet: ['fcmToken'] }) // from the original "get", changed to "getAsync"
  .then(function (user) {
    return user.attrs.fcmToken;
  })
  .catch(function (err) {
    console.log(err);
  });
fcmTokens = userTopic; // I was able to get the list of records I'm expecting.
Reference Library : https://github.com/servel333/vogels-promisified
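If you'd rather not add a dependency, the same callback-style call can be wrapped in a Promise by hand. A minimal sketch, assuming Topic.get keeps the (key, options, callback) signature shown above (getTopicAsync is a hypothetical helper name):
function getTopicAsync(key, options) {
  return new Promise((resolve, reject) => {
    Topic.get(key, options, (err, foundTopic) => {
      if (err) return reject(err);
      resolve(foundTopic);
    });
  });
}

// Inside an async function:
const foundTopic = await getTopicAsync(`${userPrefix}-${username}`, { AttributesToGet: ['fcmToken'] });
fcmTokens = foundTopic.attrs.fcmToken;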

Correct way to organise this process in Node

I need some advice on how to structure this function, as at the moment things are not happening in the correct order due to Node being asynchronous.
This is the flow I want to achieve. I don't need help with the code itself, but with the order in which to do things to achieve the end result, plus any suggestions on how to make it efficient:
1. Node routes a GET request to my controller.
2. The controller reads a .csv file on the local system and opens a read stream using the fs module.
3. Then use the csv-parse module to convert that to an array line by line (many 100,000s of lines).
4. Start a try/catch block.
5. With the current row from the CSV, take a value and try to find it in MongoDB.
6. If found, take the ID and store the line from the CSV plus this ID as a foreign ID in a separate database.
7. If not found, create an entry in the DB, take the new ID, and then do step 6.
8. Print the row number being worked on to the terminal (ideally, at some point I would like to be able to send this value to the page and have it update like a progress bar as rows are completed).
Here is a small part of the code structure that I am currently using:
const fs = require('fs');
const parse = require('csv-parse');

function addDataOne(req, id) {
  const modelOneInstance = new InstanceOne({ ...code });
  const resultOne = modelOneInstance.save();
  return resultOne;
}

function addDataTwo(req, id) {
  const modelTwoInstance = new InstanceTwo({ ...code });
  const resultTwo = modelTwoInstance.save();
  return resultTwo;
}

exports.add_data = (req, res) => {
  const fileSys = 'public/data/';
  const parsedData = [];
  let i = 0;
  fs.createReadStream(`${fileSys}${req.query.file}`)
    .pipe(parse({}))
    .on('data', (dataRow) => {
      let RowObj = {
        one: dataRow[0],
        two: dataRow[1],
        three: dataRow[2],
        etc,
        etc
      };
      try {
        ModelOne.find(
          { propertyone: RowObj.one, propertytwo: RowObj.two },
          '_id, foreign_id'
        ).exec((err, searchProp) => {
          if (err) {
            console.log(err);
          } else {
            if (searchProp.length > 1) {
              console.log('too many returned from find function');
            }
            if (searchProp.length === 1) {
              addDataOne(RowObj, searchProp[0]).then((result) => {
                searchProp[0].foreign_id.push(result._id);
                searchProp[0].save();
              });
            }
            if (searchProp.length === 0) {
              let resultAddProp = null;
              addDataTwo(RowObj).then((result) => {
                resultAddProp = result;
                addDataOne(req, resultAddProp._id).then((result) => {
                  resultAddProp.foreign_id.push(result._id);
                  resultAddProp.save();
                });
              });
            }
          }
        });
      } catch (error) {
        console.log(error);
      }
      i++;
      let iString = i.toString();
      process.stdout.clearLine();
      process.stdout.cursorTo(0);
      process.stdout.write(iString);
    })
    .on('end', () => {
      res.send('added');
    });
};
I have tried to make the functions use async/await, but it seems to conflict with fs.createReadStream or the csv-parse functionality, probably due to my inexperience and incorrect use of the code...
I appreciate that this is a long question about the fundamentals of the code, but some tips/advice/pointers on how to get this going would be appreciated. I had it working when the data was sent one record at a time via a POST request from Postman, but I can't implement the next stage, which is to read from the CSV file containing many records.
First of all, you can collapse the following two checks into a single query:
if (searchProp.length === 1) {
if (searchProp.length === 0) {
Use the upsert option of MongoDB's findOneAndUpdate query to update or insert in one operation.
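As a hedged sketch of that idea, reusing the question's field names (the exact update payload is whatever your schema needs):
// Inside an async function: find-or-create in one round trip.
const doc = await ModelOne.findOneAndUpdate(
  { propertyone: RowObj.one, propertytwo: RowObj.two }, // match on the CSV values
  { $setOnInsert: { propertyone: RowObj.one, propertytwo: RowObj.two } }, // only written on insert
  { upsert: true, new: true } // create if missing, return the resulting document
);
// doc._id is now valid whether or not the record existed before.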
Secondly, don't do this heavy work inline in the request handler. Use a queue mechanism; it will be much more efficient.
The queue I personally use is Bull:
https://github.com/OptimalBits/bull#basic-usage
This also provides the progress-reporting functionality you need.
Also, regarding using async/await with a read stream, many examples can be found online, such as: https://humanwhocodes.com/snippets/2019/05/nodejs-read-stream-promise/
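Putting both points together, here is one hedged sketch (assuming Node 10+, where streams are async-iterable, and reusing the question's csv-parse setup and field names): iterating the parser with for await...of pauses reading while each row's database work runs, which restores the sequential order the flow above needs.
const fs = require('fs');
const parse = require('csv-parse');

exports.add_data = async (req, res) => {
  const stream = fs.createReadStream(`public/data/${req.query.file}`).pipe(parse({}));
  let i = 0;
  for await (const dataRow of stream) {
    const RowObj = { one: dataRow[0], two: dataRow[1], three: dataRow[2] };
    // One row at a time: the stream waits while we hit the database.
    const doc = await ModelOne.findOneAndUpdate(
      { propertyone: RowObj.one, propertytwo: RowObj.two },
      { $setOnInsert: RowObj },
      { upsert: true, new: true }
    );
    // doc._id can now feed the step-6 insert into the second collection.
    i++;
    process.stdout.write(`\r${i}`); // simple progress counter
  }
  res.send('added');
};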

How to get a specific item from JSON

I'm new to Node.js and Dialogflow. I am using DynamoDB to store data and I'm creating skills on Google. I'm trying to write code to retrieve a specific item from that table.
I've got it working to show all items where ID is equal to 1, but how would I make it so I can get just the 'name' attribute?
My idea is that the user provides an id, then the code retrieves the name for that id, stores it in a variable, and uses agent.add(`hello ${name}`); to speak it back to the user.
function readdata(agent) {
  let dbread = new aws.DynamoDB.DocumentClient();
  const id = agent.parameters.id;
  let read = function () {
    var parameters = {
      TableName: "Dynamodb",
      Key: {
        "id": id
      }
    };
    dbread.get(parameters, function (err, data) {
      if (err) {
        console.log("error", JSON.stringify(data, null, 2));
      } else {
        console.log("success", JSON.stringify(data, null, 2));
      }
    });
    agent.add(`hello ${name}`);
  };
  read();
}
Once you have the data back from the get() call, the data object will contain an Item attribute. The value of this attribute will be another object containing the attribute/value pairs for the record, or it will be absent if the record isn't found.
The debugging you have in place that shows JSON.stringify(data) should confirm this.
Assuming you knew all the fields were there, you could do something like:
const name = data.Item.name;
A more robust way, using current JavaScript, is to guard each level so the expression yields undefined if anything is missing. Something like this works:
const name = data && data.Item && data.Item.name;
However, you will have a problem doing this with Dialogflow.
You don't show which Dialogflow library you're using, but most of them require you to return a Promise to indicate that they need to wait for asynchronous calls (such as the call to DynamoDB) to complete. You're using get() with a callback function instead of a Promise. So you need to do one of the following:
1. Wrap the call in a Promise yourself, or
2. use the promise() method: since get() returns an AWS.Request, calling promise() on it gives you a Promise that you can return, and whose then portions generate the response, similar to how you're doing your callbacks now.
Under this scheme, your call might look something like this (untested):
return dbread.get(parameters).promise()
  .then(data => {
    console.log("success", JSON.stringify(data, null, 2));
    const name = data && data.Item && data.Item.name;
    if (name) {
      agent.add(`Hello ${name}`);
    } else {
      agent.add("I don't know who you are.");
    }
  })
  .catch(err => {
    console.log("error", JSON.stringify(err, null, 2));
    agent.add("There was an error");
  });
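For completeness, option 1 (wrapping the callback in a Promise yourself) might look like this equally untested sketch, reusing the parameters object and agent from above:
return new Promise((resolve, reject) => {
  dbread.get(parameters, (err, data) => {
    if (err) return reject(err); // surface DynamoDB errors to the catch below
    resolve(data);
  });
}).then(data => {
  const name = data && data.Item && data.Item.name;
  agent.add(name ? `Hello ${name}` : "I don't know who you are.");
}).catch(err => {
  console.log("error", JSON.stringify(err, null, 2));
  agent.add("There was an error");
});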

Is this happening because of the asynchronous nature of Node.js, or is there an alternate way to achieve this?

This is what I am trying to do.
I have an empty array:
var send_data = [];
and I am using the "sync-each" npm library. Before that, I was doing the iteration with the map callback function, but I got stuck in the same situation.
Here's my code.
var each = require('sync-each');

client.execute(someQuery, [value], (err, data) => {
  var items = data.rows;
  each(items, (items, next) => {
    // Here I perform some if-else checks and some Cassandra database queries,
    // then push the value into my array send_data.
    if (items.type == true) {
      send_data.push({ value: items.message, flag: true });
    } else {
      send_data.push({ value: items.message, flag: false });
    }
  }, (err, transformedItems) => {
    if (err) {
      console.log(err);
    }
  });
});
My program runs fine without any errors, but when I console.log the final output I get an unsorted array of values, like:
[{value:1},{value:3},{value:2},{value:4}]
Is there a way to correct this?
You can use the map function, which makes more sense for your case:
var items = [1,2,3,4];
var send_data = items.map((item)=>({value:item}));
console.log(send_data);
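If each item also needs an asynchronous lookup (as the Cassandra queries in the question suggest), map plus Promise.all preserves input order no matter which query finishes first. A hedged sketch, where lookupFlag stands in for whatever per-item async query you run:
// Inside an async function; lookupFlag is a hypothetical per-item query.
const send_data = await Promise.all(items.map(async (item) => {
  const flag = await lookupFlag(item); // lookups run concurrently...
  return { value: item.message, flag }; // ...but results keep the items order
}));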

convert mongoose stream to array

I have worked with MongoDB but am quite new to the Mongoose ORM. I was trying to fetch data from a collection; the explain() output was showing 50 ms, but the overall time it took to fetch the data via Mongoose was 9 seconds. Here is the query:
Node.find({ 'dataset': datasetRef }, function (err, nodes) {
  // handle error and data here
});
Then I applied an index on the field I was querying on. The explain() output now showed 4 ms, but the total time to retrieve data via Mongoose did not change. Then I searched a bit and found that using lean() can help bring the read performance of Mongoose quite close to native MongoDB.
So I changed my query to:
Node.find({ 'dataset': datasetRef })
  .lean()
  .stream({ transform: JSON.stringify })
  .pipe(res)
This solved the performance issues completely. But the end result is a stream of concatenated JSON docs, like this:
{var11: val11, var12: val12}{var21: val21, var22: val22} ...
How do I parse this to form an array of docs? Or should I not be using a stream at all? In my opinion, there is no point using a stream if I am planning to form the array on the backend, since I will then have to wait for all the docs to be read into memory anyway. But I also think that parsing and creating the whole array on the front end might be costly.
How can I achieve the best performance in this case without clogging the network?
UPDATE
I am trying to solve this problem using a through stream. However, I am not able to insert commas between the JSON objects yet. See the code below:
res.write("[");

var through = require('through');
var tr = through(
  function write(data) {
    this.queue(data.replace(/\}\{/g, "},{"));
  }
);

var dbStream = db.node.find({ 'dataset': dataSetRef })
  .lean()
  .stream({ 'transform': JSON.stringify });

dbStream.on("end", function () {
  res.write("]");
});

dbStream
  .pipe(tr)
  .pipe(res);
With this, I am able to get the "[" at the beginning and "]" at the end. However, I am still not able to get the pattern "}{" replaced with "},{". Not sure what I am doing wrong.
UPDATE 2
I have now figured out why the replace is not working. Since I specified the transform function as JSON.stringify, the stream emits one JSON object at a time, so the write callback never sees the pattern }{ because it never receives multiple JSON objects in one chunk.
I have now modified my code and written a custom transform function that does JSON.stringify and then appends a comma at the end. The only problem I am facing is that I don't know when it is the last JSON object in the stream, because I don't want to append the comma in that case. At the moment, I append an empty JSON object once the end is encountered, but somehow this does not look like a convincing idea. Here is the code:
res.write("[");

function transform(data) {
  return JSON.stringify(data) + ",";
}

var dbStream = db.node.find({ 'dataset': dataSetRef })
  .lean()
  .stream({ 'transform': transform });

dbStream.on("end", function () {
  res.write("{}]");
});

dbStream
  .pipe(res);
The only problem I am facing here is that I don't know when it is the last JSON object in the stream.
But you do know which one is first. Knowing that, instead of appending the comma, you can prepend it to every object except the first one. In order to do that, set up your transform function inside a closure:
function transformFn() {
  var first = true;
  return function (data) {
    if (first) {
      first = false;
      return JSON.stringify(data);
    }
    return "," + JSON.stringify(data);
  };
}
Now you can just call that function and set it as your actual transform.
var transform = transformFn();

res.write("[");

var dbStream = db.node.find({ 'dataset': dataSetRef })
  .lean()
  .stream({ 'transform': transform });

dbStream.on("end", function () {
  res.write("]");
});

dbStream
  .pipe(res);
#cdbajorin and #rckd both gave correct answers.
However, repeating this code all the time seems like a pain.
Hence my solution uses an extra Transform stream to achieve the same thing.
import { Transform } from 'stream'

class ArrayTransform extends Transform {
  constructor(options) {
    super(options)
    this._index = 0
  }

  _transform(data, encoding, done) {
    if (!(this._index++)) {
      // first element, add opening bracket
      this.push('[')
    } else {
      // following element, prepend comma
      this.push(',')
    }
    this.push(data)
    done()
  }

  _flush(done) {
    if (!(this._index++)) {
      // empty
      this.push('[]')
    } else {
      // append closing bracket
      this.push(']')
    }
    done()
  }
}
Which in turn can be used as:
const toArray = new ArrayTransform();

Model.find(query).lean().stream({ transform: JSON.stringify })
  .pipe(toArray)
  .pipe(res)
EDIT: added check for empty
I love #cdbajorin's solution, so I created a more readable version of it (ES6):
Products
  .find({})
  .lean()
  .stream({
    transform: (() => {
      let index = 0;
      return (data) => (!(index++) ? '[' : ',') + JSON.stringify(data);
    })() // invoke immediately so `transform` is the inner stateful function
  })
  .on('end', () => {
    res.write(']');
  })
  .pipe(res);
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/shoppingdb');

var Sports = mongoose.model('sports', {});
var result = [];
var prefix_out = "your info";

Sports.find({ "goods_category": "parts" })
  .cursor()
  .on("data", function (doc) {
    // stream ---> string
    var str = JSON.stringify(doc);
    // string ---> JSON
    var json = JSON.parse(str);
    // handle your property
    json.handleYourProperty = prefix_out + json.imageURL;
    result.push(json);
  })
  .on('error', function (err) {
    console.log(err);
  })
  .on('close', function () {
    console.log(result);
  });
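On recent Node and Mongoose versions, a query cursor is also async-iterable, which avoids the hand-rolled Transform plumbing entirely when buffering the array is acceptable. A hedged sketch (Node is the model from the original question; res.json assumes an Express-style response):
// Inside an async handler; buffers all docs in memory, trading streaming for simplicity.
const docs = [];
for await (const doc of Node.find({ dataset: datasetRef }).lean().cursor()) {
  docs.push(doc); // each doc is a plain object thanks to lean()
}
res.json(docs); // serialize the whole array in one response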
