Add data to Cloud Firestore in a loop - Node.js

I have a nested JSON object in which each key maps to another JSON object of key/value pairs. Below is the JSON:
{
  "Testicular Torsion": {
    "What is testicular torsion?": "ABC",
    "Symptoms": "AB",
    "Risks": "AL",
    "Diagnosis": "LK",
    "Treatment": "UY"
  },
  "XYZ": {
    "X": "ABC",
    "Symptoms": "AB",
    "Risks": "AL",
    "Diagnosis": "LK",
    "Treatment": "UY"
  }
};
What I am trying to do is insert this data into Cloud Firestore (Firebase). Below is the code I am using. The problem is that only the first key/value pair (in this case the "Testicular Torsion" key and its value, which is another JSON object) gets inserted, and none of the other key/value pairs do. Why is that the case, and what needs to change in the code?
var string_medical_data = JSON.stringify(medical_json);
var json_medical = JSON.parse(string_medical_data);

function abc(poi) {
  firestore.collection('medical').doc(poi).set(json_medical[poi])
    .then(() => {
      return console.log("Added");
    })
    .catch((e) => {
      console.log('error: ', e);
      return console.log(e);
    });
}
exports.medical = functions.https.onRequest((request, response) => {
  var problems = [];
  for (var myKey in json_medical) {
    problems.push(myKey);
    break;
  }
  for (var i = 0; i < problems.length; i++) {
    // firestore.collection('medical').doc(problems[i]).set(json_medical[problems[i]])
    abc(problems[i]);
  }
  response.send({
    'fulfillmentText': `Success!!!`
  });
});

The break inside your first for...in loop exits after the first key is pushed into problems, so only one document is ever written. Beyond removing that, you'd probably be better off doing this as a batched write: you can commit multiple writes in a single request. If your data has more than 500 entries you'll have to break it up and commit 500 at a time.
exports.medical = functions.https.onRequest((request, response) => {
  var batch = firestore.batch();
  for (var myKey in json_medical) {
    var myKeyRef = firestore.collection('medical').doc(myKey);
    batch.set(myKeyRef, json_medical[myKey]);
  }
  batch.commit().then(function () {
    response.send({
      'fulfillmentText': `Success!!!`
    });
  });
});
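If the data ever grows past Firestore's limit of 500 writes per batch, the keys can be committed in chunks. The sketch below is only illustrative: it reuses the firestore and json_medical objects from the question, and the chunking helper is not part of the original answer.

// Hedged sketch: commit the keys in chunks of at most 500 writes per batch.
exports.medical = functions.https.onRequest((request, response) => {
  var keys = Object.keys(json_medical);
  var commits = [];
  for (var i = 0; i < keys.length; i += 500) {
    var batch = firestore.batch();
    keys.slice(i, i + 500).forEach(function (key) {
      batch.set(firestore.collection('medical').doc(key), json_medical[key]);
    });
    commits.push(batch.commit());
  }
  // Respond only after every chunk has been committed.
  Promise.all(commits)
    .then(function () {
      response.send({
        'fulfillmentText': `Success!!!`
      });
    })
    .catch(function (e) {
      console.log('error: ', e);
      response.status(500).send(e);
    });
});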

Related

Access the last element of a nested database

How do I get the last element in a nested database in Firebase?
This is my db:
There are several ids, and for each id, under the bot's msg node, I want the last message, which in this case would be "I am fine. Thank You....".
I know the brute-force approach, but it will take a lot of time.
Here is my code:
return ref.child(user).once('value').then((snapshot) => {
  var i = 0;
  snapshot.forEach((snap) => {
    console.log(snap.child('bot').child('msg').msg);
    snap.child('bot').child('msg')
      .forEach((openTicketSnapshot) => {
        var val = openTicketSnapshot.val();
        val.forEach((text) => {
          userMsg2.push({
            'who': 'User',
            'msg': text.msg,
            'time': text.timestamp
          });
        });
      });
  });
  // Here i will access the last index of userMsg
  var data1 = {
    "data": "found"
  };
  res.json(data1);
  // ...
  return data1;
});
How to do this efficiently?
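Assuming this is the Firebase Realtime Database and the structure is id → bot → msg as described above, one way to avoid walking every message is to let the server return only the last child of each msg node with limitToLast(1). This is a minimal sketch rather than code from the question; it reuses the ref and user names from the snippet above.

// Hedged sketch: fetch only the last message per id instead of iterating over all of them.
return ref.child(user).once('value').then((snapshot) => {
  var lastMessagePromises = [];
  snapshot.forEach((snap) => {
    // limitToLast(1) asks the database for just the final child of bot/msg.
    var query = snap.ref.child('bot').child('msg').limitToLast(1).once('value')
      .then((lastSnap) => {
        var last = null;
        lastSnap.forEach((child) => { last = child.val(); });
        return { id: snap.key, lastMsg: last };
      });
    lastMessagePromises.push(query);
  });
  return Promise.all(lastMessagePromises);
});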

Trouble with asynchronous requests pulling data from SQL database in Microsoft Bot Framework

I have a bot built with the Microsoft Bot Framework that I want to be able to pull data from an Azure SQL database in order to answer questions asked of it. I have set up the database, which holds data imported from some Excel files.
Here is my code right now:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;

var connection = new Connection(dataconfig);

connection.on('connect', function (err) {
  console.log("Connected");
  executeStatement();
});

var Request = require('tedious').Request;
var TYPES = require('tedious').TYPES;

function executeStatement() {
  request = new Request("select \"Product Name\" from SPA_Data_Feeds where \"Strategic Priority\" = 'Accelerate to Value (LD)'",
    function (err, rowCount, rows) {
      console.log(rowCount + ' row(s) returned');
    }
  );
  var result = "";
  var count = 0;
  request.on('row', function (columns) {
    columns.forEach(function (column) {
      console.log("%s\t", column.value);
      result += column.value + "\t\n";
      count++;
      if (count == rowCount) {
        ATVData(result);
      }
    });
  });
  connection.execSql(request);
}

function ATVData(result) {
  // Puts "result" inside of an adaptive card
}
I can't seem to figure out how to get the if statement right. rowCount does not work because the code does not wait for the Request callback to define it first, and I have tried things like columns.length and results.length, but none of them work.
Is there something else I could use that would complete the if statement? Or do I need to reformat somehow with callbacks/promises to get it to wait for rowCount to be defined? If so could I get some advice on that?
We can use Q.js, which is one of the JavaScript Promise implementations, to solve this issue. For example:
var Connection = require('tedious').Connection;
var Request = require('tedious').Request;
var q = require('q');

// Create connection to database
var config = {
  userName: '', // update me
  password: '', // update me
  server: '',   // update me
  options: {
    database: '', // update me
    encrypt: true
  }
};

var connection = new Connection(config);

// Attempt to connect and execute queries if connection goes through
connection.on('connect', function (err) {
  if (err) {
    console.log(err);
  } else {
    queryDatabase().then(function (result) {
      ATVData(result);
    }, function (err) {
      console.log(err);
    });
  }
});

function queryDatabase() {
  console.log('Reading rows from the Table...');
  // create a promise
  var deferred = q.defer();
  // Read all rows from table
  var result = [];
  var request = new Request(
    "SELECT * From ForumMessages",
    function (err, rowCount) {
      deferred.resolve(result);
    });
  request.on('row', function (columns) {
    columns.forEach(function (column) {
      console.log("%s\t%s", column.metadata.colName, column.value);
      result.push(columns);
    });
  });
  connection.execSql(request);
  // return the promise
  return deferred.promise;
}

function ATVData(result) {
  // your bot code goes here
}
To expand on Grace's answer, I think you can also do this for each row, which adds some utility:
request.on('row', function (columns) {
  var singleResult = {};
  columns.forEach(function (column) {
    console.log("%s\t%s", column.metadata.colName, column.value);
    // Add a property to the singleResult object for each column.
    singleResult[column.metadata.colName] = column.value;
  });
  // Push the completed singleResult object to the array, once per row.
  result.push(singleResult);
});
Then you can, in your bot's code, call each object by the property name in dot notation, for example: result[x].colName where colName is the name of the column (or object property in this case).
Example (assuming at least one result item from the database, with a "link" column that has data):
var adaptiveCardExample = {
  'contentType': 'application/vnd.microsoft.card.adaptive',
  'content': {
    '$schema': 'http://adaptivecards.io/schemas/adaptive-card.json',
    'type': 'AdaptiveCard',
    'version': '1.0',
    'body': [
      {
        "type": "TextBlock",
        "text": "Code Example"
      },
      {
        "type": "TextBlock",
        "text": "We're going to " + result[0].link,
        "wrap": true
      }
    ],
    'actions': [
      {
        'type': 'Action.OpenUrl',
        'title': 'Go to the example link',
        'url': result[0].link
      }
    ]
  }
};

var adaptiveCardMsg = new builder.Message(session).addAttachment(adaptiveCardExample);
session.send(adaptiveCardMsg);
You may want to add a check for null or undefined for the property in the case it is a nullable field in the database, as a precaution.
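As a concrete illustration of that precaution, a minimal guard might look like the sketch below; it reuses result, builder, session, and adaptiveCardExample from the answer above, and the fallback message text is only a placeholder.

// Hedged sketch: only send the card when the "link" column actually holds a value.
if (result.length > 0 && result[0].link != null) { // != null also covers undefined
  var adaptiveCardMsg = new builder.Message(session).addAttachment(adaptiveCardExample);
  session.send(adaptiveCardMsg);
} else {
  session.send("Sorry, there is no link available for that item.");
}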

async.waterfall randomly sorts results

I was writing some nested code and tried using async.waterfall and async.series, but I am getting randomly ordered results every time I refresh. It seems to be because the queries in the first two functions finish in a random order.
The first query sorts by committed_date DESC, but when I add the two sub-queries the sort gets distorted.
Step 1: loop over landingpages
Step 1.1: fetch repository details (details1)
Step 1.2: fetch version details (details2)
Step 2: build the array
db.collection('landingpages').find({is_deleted: {$ne: 1}}).sort({committed_date: -1}).limit(10).toArray(function (err, db_results) {
  var data_array = [];
  var x = 1;
  if (db_results.length == 0) {
    return_data.lps = data_array;
    parallel_done();
  } else {
    async.each(db_results, function (db_results1, cb) {
      async.waterfall(
        [
          function (callback) {
            // if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
            var data_repo = {};
            db.collection('repositories').find({repository_id: repository_id}).toArray(function (err, db_results1) {
              if (db_results1.length == 0) {
                var data_repo = {};
                callback(null, data_repo);
              } else {
                var data_repo = db_results1[0];
                callback(null, data_repo);
              }
            });
          },
          function (callback) {
            // if this is removed or passed as callback(null, ""); the sort is fixed from committed - 1
            var data_version = {};
            db.collection('versions').find({landingpage_id: landingpage_id}).sort({_id: -1}).limit(1).toArray(function (err, db_results1) {
              if (db_results1.length == 0) {
                var data_version = {};
                callback(null, data_version);
              } else {
                var data_version = db_results1[0];
                callback(null, data_version);
              }
            });
          }
        ],
        function (err, data_repo, data_version) {
          var document = {
            "x": x++,
            "landingpage_id": db_results1.landingpage_id,
            "repository_id": db_results1.repository_id,
            "version_id": data_version.version_id,
            "val": db_results1,
            "data_repo": data_repo,
            "data_version": data_version,
          };
          data_array.push(document);
          if (data_array.length == db_results.length) {
            return_data.lps = data_array;
          }
        }
      );
    });
  }
});
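One way to keep the committed_date order is to build the output array by index rather than by completion order. The sketch below is only an illustration: it reuses the question's db, db_results, and return_data names, swaps async for Promise.all (a substitution, not the original approach), and assumes repository_id and landingpage_id are fields on each landing page document.

// Hedged sketch: write each result into the slot matching its position in db_results,
// so the final array keeps the committed_date sort no matter which queries finish first.
db.collection('landingpages').find({is_deleted: {$ne: 1}})
  .sort({committed_date: -1}).limit(10).toArray(function (err, db_results) {
    var lookups = db_results.map(function (lp, index) {
      var repoQuery = db.collection('repositories')
        .find({repository_id: lp.repository_id}).toArray();
      var versionQuery = db.collection('versions')
        .find({landingpage_id: lp.landingpage_id}).sort({_id: -1}).limit(1).toArray();
      return Promise.all([repoQuery, versionQuery]).then(function (results) {
        return {
          x: index + 1,
          landingpage_id: lp.landingpage_id,
          repository_id: lp.repository_id,
          version_id: (results[1][0] || {}).version_id,
          val: lp,
          data_repo: results[0][0] || {},
          data_version: results[1][0] || {}
        };
      });
    });
    Promise.all(lookups).then(function (data_array) {
      return_data.lps = data_array; // already in committed_date order
    });
  });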

Update variable value in module

I'm a real newbie in Node.js, so please bear with me.
I'm trying to use an external file as a module so I can use it in other files. The project is bigger than this, but let's say my module is:
var Tools = module.exports = {
  result_arr: [],
  object_data: {
    times: [],
    temps1: [],
    temps2: [],
    temps3: [],
    temps4: [],
    levels: [],
    flows: []
  },
  getLastNRows: function (whereIsData, DB_info, table, NRows) {
    if (whereIsData == "MySQL") {
      function setValue(value) {
        Tools.result_arr = value;
      }
      function dataArray2Object(array_data) {
        Tools.object_data.times = array_data.map(row => row.timestamp);
        Tools.object_data.temps1 = array_data.map(row => row.temp1);
        Tools.object_data.temps2 = array_data.map(row => row.temp2);
        Tools.object_data.temps3 = array_data.map(row => row.temp3);
        Tools.object_data.temps4 = array_data.map(row => row.temp4);
        Tools.object_data.levels = array_data.map(row => row.level_ice_bank);
        Tools.object_data.flows = array_data.map(row => row.flow);
      }
      var queryString = "SELECT timestamp, temp1, temp2, temp3, temp4, level_ice_bank, flow FROM " +
        table + " ORDER BY id DESC LIMIT " + NRows + ";";
      var connnection = mysql.createConnection(DB_info);
      connnection.connect(function (err) {
        console.log("connected");
        if (err) throw err;
      });
      connnection.query(queryString, function (err, rows) {
        console.log("queried");
        if (err) throw err;
        setValue(rows);
        dataArray2Object(Tools.result_arr);
        console.log(Tools.result_arr);
        console.log(Tools.object_data);
      });
    } else {
      console.log("Function only accepts data stored in MySQL.\n(u still have to improve...)");
      return;
    }
  }
};
The variable object_data is supposed to be used in a main file. This way, whenever I call getLastNRows, I expect object_data to be updated by the operations in getLastNRows. The main file would be:
var tools = require('./tools');

var where2save = "MySQL";
var info_db = {
  host: "127.0.0.1",
  user: "root",
  password: "xxxx",
  database: "mydb",
  port: 3306
};
var table = "tempdata";
var NRows = 4;

tools.getLastNRows(where2save, info_db, table, NRows);
console.log(tools.object_data);
What is observed is that, in fact, tools.object_data is not updated by getLastNRows in the main file, although console.log(Tools.object_data); from the tools.js (module) file logs the updated values. So my question is:
How can I make getLastNRows update tools.object_data (which is empty when created) in the main file?
Is getLastNRows asynchronous? Because it seems to me that is the cause of the problem.
Main calls getLastNRows, which in turn calls connection.query; the query runs asynchronously, so execution immediately continues to the console.log in main, where tools.object_data has not been updated yet.
Try:
getLastNRows: function (whereIsData, DB_info, table, NRows, cb) {
  // ...
  connnection.query(queryString, function (err, rows) {
    console.log("queried");
    if (err) throw err;
    setValue(rows);
    dataArray2Object(Tools.result_arr);
    console.log(Tools.result_arr);
    console.log(Tools.object_data);
    cb();
  });
  // ...
}

// in main
tools.getLastNRows(where2save, info_db, table, NRows, function () {
  console.log(tools.object_data);
});
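An alternative, sketched below rather than taken from the answer above, is to have getLastNRows return a Promise instead of accepting a callback; it assumes the same queryString, connnection, and dataArray2Object setup that the question's module already builds.

// Hedged sketch: a Promise-returning variant of getLastNRows.
getLastNRows: function (whereIsData, DB_info, table, NRows) {
  return new Promise(function (resolve, reject) {
    // ... build queryString and the MySQL connection exactly as in the question ...
    connnection.query(queryString, function (err, rows) {
      if (err) return reject(err);
      Tools.result_arr = rows;            // same effect as setValue(rows)
      dataArray2Object(Tools.result_arr); // fills Tools.object_data
      resolve(Tools.object_data);
    });
  });
}

// in main
tools.getLastNRows(where2save, info_db, table, NRows)
  .then(function (object_data) {
    console.log(object_data);
  })
  .catch(function (err) {
    console.log(err);
  });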

Fetch multiple documents in a stored procedure (Azure DocumentDB)

I have two document types, Listing and Product. A Listing document contains a list of Products for certain countries, like this:
Listing:
{
  "Name": "Default",
  "Countries": {
    "_default": [
      "4QlxAPFcCAAPAAAAAAAAAA==",
      "4QlxAPFcCAAHAAAAAAAAAA=="
    ],
    "US": [
      "4QlxAPFcCAAIAAAAAAAAAA==",
      "4QlxAPFcCAAHAAAAAAAAAA=="
    ]
  },
  "Type": "Listing",
  "id": "dfed1839-07c5-482b-81c5-669b1dbcd0b6",
  "_rid": "4QlxAPFcCAAEAAAAAAAAAA=="
}
Product:
{
  "Name": "Widget",
  "Price": 3.45,
  "Type": "Product",
  "_rid": "4QlxAPFcCAAHAAAAAAAAAA=="
}
My goal was to create a stored procedure in the Azure DocumentDB collection taking two parameters, rid and country, which would fetch the Listing document and the Product documents for that country in the most efficient manner possible. My presumption is that loading a document by its resource id using getContext().getCollection().readDocument(...) would be the fastest way, hence the attempt to write a stored procedure for this.
My attempts have been to nest the consecutive calls (callback hell?), to use generators/iterators with yield, and then a pure Promise approach. All of the attempts have given the same result:
It will fetch the first document, but will end quite abruptly after the document has been received.
For reference, here's my latest attempt:
function test(rid, country) {
  var collection = getContext().getCollection();
  var collectionSelfLink = collection.getSelfLink();
  var docsLink = collectionSelfLink + "docs/";
  var body = getContext().getResponse().setBody;

  function getDocument(rid) {
    return new Promise(function (resolve, reject) {
      var accepted = collection.readDocument(docsLink + rid, (err, doc, opts) => {
        resolve(doc);
      });
      if (!accepted)
        reject("Not accepted");
    });
  }

  getDocument(rid)
    .then(doc => {
      body("0. First step"); // set test body
      // Countries is a Dictionary<string, string[]> with resource ids
      return doc.Countries[country] || doc.Countries["_default"];
    })
    // This is how far it gets, resulting in response "1. Documents to fetch: 2"
    .then(a => body("1. Documents to fetch: " + a.length))
    .then(a => a.map(function (productId) { return getDoument(productId); }))
    .then(a => body("2. It should come this far, right?"))
    .then(a => Promise.all(a))
    .then(a => body(a))
    .catch(function (e) { throw new Error(JSON.stringify(e)); });
}
It turns out that nesting the calls does in fact work, provided the response body is updated on each iteration(?).
The following procedure worked as expected:
function test(rid, country) {
  var collection = getContext().getCollection();
  var collectionSelfLink = collection.getSelfLink();
  var docsLink = collectionSelfLink + "docs/";
  var body = getContext().getResponse().setBody;

  var accepted = collection.readDocument(docsLink + rid, (err, doc, opts) => {
    if (err) throw new Error(err.message);
    // Countries is a Dictionary<string, string[]> with resource ids
    var offerIds = doc.Countries[country] || doc.Countries["_default"];
    var result = [];
    for (var docId of offerIds) {
      var subAccepted =
        collection.readDocument(docsLink + docId, (err, doc, opts) => {
          if (err) throw new Error(err.message);
          result.push(doc);
        });
      if (!subAccepted)
        throw new Error("A subsequent request was not accepted");
      body(result); // <-- Note, setting body in each iteration.
    }
  });
  if (!accepted)
    throw new Error("The request was not accepted");
}
