Implement Redis in Node.js Bot Service - node.js

Bot Info
App ID: 776ba3b4-38e5-4582-809d-7c8d773cfe9b
SDK Platform: Node.js
SDK Version:
Active Channels: Direct Line
Deployment Environment: Azure Bot Service
Issue Description
I need help implementing Redis to save bot state. I'm working on a project where reducing latency as much as possible is a hard requirement. Right now we are using DocumentDB, but since Redis works in memory this should be faster.
I've followed the MongoDB tutorial, Microsoft Bot framework MongoDB as middle layer to store conversational states, data and context, and I'm editing the file /lib/IStorageClient.js to connect to, save to, and retrieve from Redis.
Code Example
This is my implementation of /lib/IStorageClient.js; instead of the MongoDB connection I've put in a Redis connection.
"use strict";
var Consts = require('./Consts');
var redis = require('redis');
var IStorageClient = (function () {
function IStorageClient(options) {
this.options = options;
}
IStorageClient.prototype.initialize = function (callback) {
var _this = this;
var host = "MyRedis.redis.cache.windows.net";
var auth = "KEY";
var client = redis.createClient(6380,host , {auth_pass: auth, tls:
{servername: host}});
this.client = client;
callback(null);
};
IStorageClient.prototype.insertOrReplace = function (partitionKey, rowKey,
entity, isCompressed, callback) {
console.log("=========Insert IStorageClient===========")
var docDbEntity = { id: partitionKey + ',' + rowKey, data: entity,
isCompressed: isCompressed };
var host = "MyRedis.redis.cache.windows.net";
var auth = "KEY";
var client = redis.createClient(6380,host , {auth_pass: auth, tls:
{servername: host}});
client.set(partitionKey + ',' + rowKey, JSON.stringify(docDbEntity),
function(err, reply) {
console.log("=========SET===========");
console.log("ID: ",partitionKey + ',' + rowKey);
console.log("Result: ",docDbEntity);
});
};
IStorageClient.prototype.retrieve = function (partitionKey, rowKey,
callback) {
console.log("=========Retrieve IStorageClient===========")
var id = partitionKey + ',' + rowKey;
var host = "MyRedis.redis.cache.windows.net";
var auth = "KEY";
var client = redis.createClient(6380,host , {auth_pass: auth, tls:
{servername: host}});
//id
client.get(id, function(error, result){
console.log("=========Get===========");
console.log("Search: ",id);
console.log("Result: ",result);
if (error) {
console.log("Error:",error)
callback(error, null, null);
}
else if (result == null) {
callback(null, null, null);
}
else if (result.length == 0) {
callback(null, null, null);
}
else {
var finaldoc = JSON.parse(result);
callback(null, finaldoc, null);
}
});
};
IStorageClient.getError = function (error) {
if (!error)
return null;
return new Error('Error Code: ' + error.code + ' Error Body: ' +
error.body);
};
return IStorageClient;
}());
exports.IStorageClient = IStorageClient;
Reproduction Steps
Download Microsoft Bot framework MongoDB as middle layer to store conversational states, data and context
Replace /lib/IStorageClient.js with my implementation
Set the Redis host and access key in /lib/IStorageClient.js
Run the bot in the Bot Framework Emulator
Actual Results
I can see the JSON being saved to Redis, and I can print the retrieved result in the console, but the bot's answer is never received in the Bot Framework Emulator.

You are looking for the botbuilder-redis-storage middleware, available here:
GitHub - https://github.com/suttna/botbuilder-redis-storage
NPM - https://www.npmjs.com/package/botbuilder-redis-storage
Usage example:
var redis = require('redis')
var RedisStorage = require('botbuilder-redis-storage')
var builder = require('botbuilder')
// Initialize redis client
var redisClient = redis.createClient(process.env.REDIS_URL, { prefix: 'bot-storage:' });
// Create new storage with redis client
var storage = new RedisStorage(redisClient)
var connector = new builder.ChatConnector()
var bot = new builder.UniversalBot(connector)
// Configure bot to use the RedisStorage
bot.set('storage', storage)
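If you prefer to keep the custom IStorageClient approach from the question, note that its insertOrReplace never invokes the callback it receives, so the framework's state write never completes; that would explain why the reply never reaches the emulator. Below is a minimal sketch of that fix, assuming the same constructor and method signatures as the MongoDB sample and reusing the client created in initialize (host and key are placeholders):
"use strict";
var redis = require('redis');

var IStorageClient = (function () {
    function IStorageClient(options) {
        this.options = options;
    }

    IStorageClient.prototype.initialize = function (callback) {
        // Placeholder host/key; create the client once and reuse it for every call.
        var host = "MyRedis.redis.cache.windows.net";
        var auth = "KEY";
        this.client = redis.createClient(6380, host, { auth_pass: auth, tls: { servername: host } });
        callback(null);
    };

    IStorageClient.prototype.insertOrReplace = function (partitionKey, rowKey, entity, isCompressed, callback) {
        var key = partitionKey + ',' + rowKey;
        var doc = { id: key, data: entity, isCompressed: isCompressed };
        this.client.set(key, JSON.stringify(doc), function (err, reply) {
            // Propagating the result lets the SDK finish its saveData operation.
            callback(err, doc, reply);
        });
    };

    IStorageClient.prototype.retrieve = function (partitionKey, rowKey, callback) {
        this.client.get(partitionKey + ',' + rowKey, function (err, result) {
            if (err) return callback(err, null, null);
            if (!result) return callback(null, null, null);
            callback(null, JSON.parse(result), null);
        });
    };

    return IStorageClient;
}());
exports.IStorageClient = IStorageClient;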

Related

How to execute SQL function in Azure function project

I am working on an Azure Function in Node.js that invokes a web service, and that part works fine. I have a function GetDetails that makes an SQL query to retrieve data from a SQL Server database.
const sql = require("mssql");
const dataSQL = {};
const GUID = "";
const navServiceKey = "";
const navUserName = "";
async function GetDetails() {
var email = "yamen#gmail.com";
var password = "password";
try {
console.log("nav service" + navServiceKey);
// make sure that any items are correctly URL encoded in the connection string
await sql.connect(
"Server=tcp:app.windows.net,1433;Database=BHUB_TEST;User Id=AppUser;Password=password;Encrypt=true MultipleActiveResultSets=False;TrustServerCertificate=False;ConnectionTimeout=30;"
);
const result =
await sql.query`select * from users where email = ${email} AND password = ${password} `;
if (result.rowsAffected[0] >= 1) {
dataSQL = result.recordset[0];
navServiceKey = JSON.stringify(dataSQL.navServiceKey);
GUID = JSON.stringify(dataSQL.userGUID);
navUserName = JSON.stringify(dataSQL.navUserName);
} else {
console.log("failed");
}
} catch (err) {
}}
Since this is Node.js, if I test this SQL function on its own (i.e. node index.js), it executes successfully and returns the result. However, I am calling this function inside the Azure Function as shown below, and when I run the Azure Functions project and test the given URL in Postman, the SQL function doesn't return anything.
Any idea how to execute an SQL query function inside an Azure Function?
module.exports = async function (context, req) {
    GetDetails();
    const axios = require("axios");
    const data = {
        email: req.query.email,
        password: req.query.password,
    };
    var cred = "YAMEN" + ":" + "jbdv******";
    const encoded = Buffer.from(cred, "utf8").toString("base64");
    var credbase64 = "Basic " + encoded;
    const headers = {
        Authorization: credbase64,
        "Content-Type": " application/json",
    };
    try {
        const url = `https://tegos/BC19-NUP/QryEnwisAppUser?filter=UserSecurityID eq ${GUID}`;
        const response = await axios.get(url, {
            headers,
        });
        console.log(response);
        console.log(response.data);
        context.res = {
            // status: 200, /* Defaults to 200 */
            body: response.data,
        };
    } catch (e) {
        // maybe return the error
        console.error(e);
    }
};
That is not how you connect to a SQL database from an Azure application. You need to use the pyodbc module instead.
Quickstart: Use Python to query a database
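For what it's worth, if the function stays in Node.js, one visible problem in the snippet above is that GetDetails() is called without await, so the handler can return before the query completes and before GUID is populated. A minimal sketch of the awaited flow, using the same mssql package (the connection string and column names are placeholders taken from the question):
const sql = require("mssql");

// Returns the matching user row (or null) instead of mutating module-level consts.
async function getDetails(email, password) {
    await sql.connect(process.env.SQL_CONNECTION_STRING); // placeholder connection string
    const result = await sql.query`select * from users where email = ${email} AND password = ${password}`;
    return result.rowsAffected[0] >= 1 ? result.recordset[0] : null;
}

module.exports = async function (context, req) {
    // Await the query so its result is available before calling the web service.
    const user = await getDetails(req.query.email, req.query.password);
    if (!user) {
        context.res = { status: 401, body: "user not found" };
        return;
    }
    const guid = JSON.stringify(user.userGUID);
    context.res = { body: { guid: guid } };
};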

What is the proper way to set up this Cron Task after a restart?

I am building an IoT application which connects to an MQTT server, where users subscribe to a topic for a particular building. I am able to connect, retrieve the information I need, and save it to a database.
The problem is that each individual user is stored in the session and the connection parameters are submitted in a form, so the Subscribe function, which is needed for the topic subscription, loses the information it needs to connect on each server restart.
What is the proper way to set up a cron task which stores the submitted info for each user, along with their session, so it survives a server restart (especially when it crashes from a bug)?
var mqtt = require('mqtt');
var Building = require('../models/Building');
var Volume = require('../models/Volume');

var today = new Date();
var date = today.getFullYear() + '-' + (today.getMonth() + 1) + '-' + today.getDate();

///////////////////////START MQTT SUBSCRIPTION///////////////////////
const Subscribe = async (req, res, name) => {
    const options = {
        port: port,
        username: req.body.username,
        password: req.body.password,
        clientId: "someclientid"
    };
    const client = mqtt.connect("someserver", options);
    var topic_list = "sometopic";

    client.on('connect', function () {
        client.subscribe(topic_list, function (err, granted) {
            if (err) {
                console.error(err);
                return;
            }
            console.log('Subscribed to topic: ' + topic_list);
        });
        console.log("connected flag " + client.connected);
    });

    client.on('message', function (topic, message, packet) {
        var msgObject = JSON.parse(message.toString());
        var data = JSON.stringify(msgObject.data).slice(-9, -5);
        console.log(JSON.stringify(msgObject.data));
        var v = new DataView(new ArrayBuffer(4));
        v.setUint32(0, `0x${data}0000`);
        console.log('message is ' + v.getFloat32(0));
        var decimal = v.getFloat32(0);
        storedata(req, decimal, name);
    });
};
/////////////////////////End MQTT SUBSCRIPTION///////////////////////

async function storedata(req, value, name) {
    const absvalue = Math.abs(value);
    if (absvalue != 0) {
        var building = await Building.findOne({
            user: req.user,
            name: name
        });
        if (building) {
            const oldvolume = await Volume.create({ source: name, calculated_at: date, volume: absvalue, user: req.user });
            oldvolume.save();
            console.log(oldvolume);
        }
    }
}

module.exports.Subscribe = Subscribe;
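One common way to survive restarts is to persist the submitted connection parameters when the form is posted, and to re-subscribe from the database on startup (or from a scheduled job). A rough sketch, assuming a hypothetical Mongoose Subscription model and the Subscribe function shown above; the model name, fields, and require path are illustrative only:
var mongoose = require('mongoose');
const { Subscribe } = require('./subscribe'); // path is an assumption

// Hypothetical model holding everything needed to rebuild a subscription.
var Subscription = mongoose.model('Subscription', new mongoose.Schema({
    user: mongoose.Schema.Types.ObjectId,
    username: String,
    password: String,
    buildingName: String
}));

// Save the parameters when the user submits the form, then subscribe as usual.
async function subscribeAndRemember(req, res, name) {
    await Subscription.findOneAndUpdate(
        { user: req.user, buildingName: name },
        { username: req.body.username, password: req.body.password },
        { upsert: true }
    );
    return Subscribe(req, res, name);
}

// On server startup, restore every stored subscription.
async function resubscribeAll() {
    const subs = await Subscription.find({});
    for (const s of subs) {
        // Rebuild the minimal request shape Subscribe() reads from (body.username, body.password, user).
        const fakeReq = { body: { username: s.username, password: s.password }, user: s.user };
        Subscribe(fakeReq, null, s.buildingName);
    }
}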

Moving data from nodejs console to SQL Server

I am very new to Node.js. Using the following code I am able to retrieve data from the Intrinio website into the console.
var https = require("https");
var username = "********";
var password = "********";
var auth = "Basic " + new Buffer(username + ':' +
password).toString('base64');
var request = https.request({
method: "GET",
host: "api.intrinio.com",
path: "/companies?ticker=AAPL",
headers: {
"Authorization": auth
}
}, function (response) {
var json = "";
response.on('data', function (chunk) {
json += chunk;
});
response.on('end', function () {
var company = JSON.parse(json);
console.log(company);
});
});
request.end();
And the result (the company JSON) is printed to the console.
My question is: how do I transfer this data into SQL Server? I tried watching a few tutorials and videos, but I was not able to understand exactly how it works.
Thanks in advance.
This is a very broad question. Connecting to SQL Server from Node.js usually uses the tedious package. You can use this package directly to insert data, as described in https://learn.microsoft.com/en-us/sql/connect/node-js/step-3-proof-of-concept-connecting-to-sql-using-node-js
Following is a snippet from the above link.
var Connection = require('tedious').Connection;
var config = {
userName: 'yourusername',
password: 'yourpassword',
server: 'yourserver.database.windows.net',
// If you are on Azure SQL Database, you need these next options.
options: {encrypt: true, database: 'AdventureWorks'}
};
var connection = new Connection(config);
connection.on('connect', function(err) {
// If no error, then good to proceed.
console.log("Connected");
executeStatement1();
});
var Request = require('tedious').Request
var TYPES = require('tedious').TYPES;
function executeStatement1() {
request = new Request("INSERT SalesLT.Product (Name, ProductNumber, StandardCost, ListPrice, SellStartDate) OUTPUT INSERTED.ProductID VALUES (#Name, #Number, #Cost, #Price, CURRENT_TIMESTAMP);", function(err) {
if (err) {
console.log(err);}
});
request.addParameter('Name', TYPES.NVarChar,'SQL Server Express 2014');
request.addParameter('Number', TYPES.NVarChar , 'SQLEXPRESS2014');
request.addParameter('Cost', TYPES.Int, 11);
request.addParameter('Price', TYPES.Int,11);
request.on('row', function(columns) {
columns.forEach(function(column) {
if (column.value === null) {
console.log('NULL');
} else {
console.log("Product id of inserted item is " + column.value);
}
});
});
connection.execSql(request);
}
But using an ORM is the better practice; sequelize is one of the best in the Node community.
$ npm install --save sequelize
$ npm install --save tedious // MSSQL
const Sequelize = require('sequelize');
const sequelize = new Sequelize('database', 'username', 'password', {
host: 'localhost',
dialect: 'mssql'
});
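To make the Sequelize route concrete, here is a rough sketch of defining a model for the company data returned above and inserting a row; the model name and columns are assumptions chosen for illustration, so adjust them to match the JSON you actually receive:
const Sequelize = require('sequelize');
const sequelize = new Sequelize('database', 'username', 'password', { host: 'localhost', dialect: 'mssql' });

// Hypothetical model for the Intrinio company payload.
const Company = sequelize.define('company', {
    ticker: Sequelize.STRING,
    name: Sequelize.STRING,
    lei: Sequelize.STRING
});

// `company` stands for the object parsed from the API response in the question's code.
function saveCompany(company) {
    return sequelize.sync()                      // create the table if it does not exist
        .then(() => Company.create({
            ticker: company.ticker,
            name: company.name,
            lei: company.lei
        }))
        .then(row => console.log('Inserted company with id ' + row.id))
        .catch(err => console.error(err));
}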
You should go through the documentation of either of these modules to understand them better.

Algolia in Azure

I am using Firebase for my Android application and am performing full-text search using Algolia, which is what most blogs suggest. I have successfully developed the script and it is functioning properly. Now I want to host the script so it runs 24x7. As I have an Azure account, how do I go about uploading the script? I have tried uploading the following as a Function and as a Web App, but have been unsuccessful.
PS: I have tried Heroku but wasn't satisfied.
The Script.
var http = require('http');
var port = process.env.port || 1337;
http.createServer(function (req, res) {
res.writeHead(200, { 'Content-Type': 'text/plain' });
res.end('Hello World\n');
}).listen(port);
var dotenv = require('dotenv');
var firebaseAdmin = require("firebase-admin");
var algoliasearch = require('algoliasearch');
var algoliasearchHelper = require('algoliasearch-helper');
// load values from the .env file in this directory into process.env
dotenv.load();
// configure firebase
var serviceAccount = require("./serviceAccountKey.json");
firebaseAdmin.initializeApp({
credential: firebaseAdmin.credential.cert(serviceAccount),
databaseURL: process.env.FIREBASE_DATABASE_URL
});
var database = firebaseAdmin.database();
// configure algolia
var algolia = algoliasearch(process.env.ALGOLIA_APP_ID, process.env.ALGOLIA_API_KEY);
var index = algolia.initIndex('books');
var contactsRef = database.ref("/BookFair");
contactsRef.on('child_added', addOrUpdateIndexRecord);
contactsRef.on('child_changed', addOrUpdateIndexRecord);
contactsRef.on('child_removed', deleteIndexRecord);
function addOrUpdateIndexRecord(dataSnapshot) {
// Get Firebase object
var firebaseObject = dataSnapshot.val();
// Specify Algolia's objectID using the Firebase object key
firebaseObject.objectID = dataSnapshot.key;
// Add or update object
index.saveObject(firebaseObject, function(err, content) {
if (err) {
throw err;
}
console.log('Firebase object indexed in Algolia', firebaseObject.objectID);
});
}
function deleteIndexRecord(dataSnapshot) {
// Get Algolia's objectID from the Firebase object key
var objectID = dataSnapshot.key;
// Remove the object from Algolia
index.deleteObject(objectID, function(err, content) {
if (err) {
throw err;
}
console.log('Firebase object deleted from Algolia', objectID);
});
}
var queries = database.ref("/queries");
queries.on('child_added', addOrUpdateIndexRecordN);
function addOrUpdateIndexRecordN(dataSnapshot) {
// Get Firebase object
var firebaseObject = dataSnapshot.val();
// Specify Algolia's objectID using the Firebase object key
firebaseObject.objectID = dataSnapshot.key;
// Add or update object
var collegeName = "";
var query_ID_LOLWA= "";
var year="";
var query = "";
var counter = 0;
for(var i in firebaseObject){
var c = firebaseObject.charAt(i);
if(c=='/'){
counter = counter + 1;
continue;
}
else{
if(counter==2)
collegeName = collegeName + c;
else if(counter == 3)
year = year+c;
else if(counter == 1)
query_ID_LOLWA = query_ID_LOLWA + c;
else
query = query +c;
}
}
console.log(collegeName);
console.log(year);
console.log(query_ID_LOLWA);
console.log(query);
const query_final = query_ID_LOLWA;
var helper = algoliasearchHelper(algoliasearch("****", "****"), 'books', {
facets: ['collegeName', 'year','priority']});
helper.on('result', function(data,query_ID_LOLWA){
data.getFacetValues('priority',{sortBy: ['count:desc']});
console.log(data.hits);
var path_query = "/queries_answers/"+query_final;
path_query = path_query.toString();
console.log(path_query);
if(data.hits.length==0){
console.log("No results");
database.ref(path_query).push(-1);
}
else if(data.hits.length>1){
var ID = 1;
var counter = -1;
var length = data.hits.length-1;
for(var h in data.hits){
counter = counter + 1;
if( (counter%5 == 0) && (counter != 0)){
ID = ID + 1;
}
database.ref(path_query+"/"+ID).push(data.hits[h].uuid);
}
database.ref(path_query+"/totalResults").push(data.hits.length);
}
else{
database.ref(path_query+"/totalResults").push(data.hits.length);
for(var h in data.hits)
database.ref(path_query+"/1").push(data.hits[h].uuid);
}
});
helper.addFacetRefinement('collegeName', collegeName);
helper.addFacetRefinement('year',year);
helper.setQuery(query);
helper.search();
/*index.search(firebaseObject, function(err, content) {
if (err) {
console.error(err);
return;
}
console.log(content.hits);
for (var h in content.hits) {
console.log('Hit(' + content.hits[h].objectID + '): ' + content.hits[h].uuid);
}
database.ref("/query_result").push(content.hits);
});*/
}
Without more detail than "but have been unsuccessful", the only advice one can give you is to follow the usual steps to get a timer-triggered Azure Function deployed.
The simplest way is to use the Azure Portal:
Login to your Microsoft Azure account
Create a Function App to host your function
Add a Timer-triggered Function
Select the TimerTrigger-Javascript template to get started
At this point, you'll have a function that runs every minute. You can check the logs to confirm it is working.
You now want to configure its frequency:
Update the function's Timer Schedule (in the Integrate tab) to set how frequently the function should run
Finally, replace the template's code with your own.
You can find a detailed tutorial here with explanations on how to achieve each of these steps.
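For illustration, the schedule for a timer-triggered Function lives in the function's binding configuration; the CRON expression and body below are placeholders only (this sketch fires every five minutes and is where the Firebase/Algolia sync logic from the script above would go):
// function.json (timer binding) -- example schedule: every five minutes
// {
//   "bindings": [
//     { "name": "syncTimer", "type": "timerTrigger", "direction": "in", "schedule": "0 */5 * * * *" }
//   ]
// }

// index.js
module.exports = function (context, syncTimer) {
    // Replace this with the Firebase -> Algolia indexing logic from the script above.
    context.log('Algolia sync timer fired at ' + new Date().toISOString());
    context.done();
};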

How to invoke/use the ranker ID from within the nodeJs code to call Retrieve & Rank API?

I have successfully tried an approach in Watson Retrieve and Rank that sets the 'ranker_id' from within the code snippet rather than setting it as an environment variable.
Below is the code snippet:
var qs = require('querystring');
// search documents
var ranker_id = 'replace with ID';
var question = payload.input.text; //Only the question is required
var query = qs.stringify({q: question, ranker_id: ranker_id, fl: 'id,title,contentHtml'});
solrClient.get('fcselect', query, function(err, searchResponse) { ... });
In some setups the qs package also works:
var qs = require('qs');
This would be the requirement for deployment across the production architecture, where the code resides on production servers and makes calls to the API. In such a scenario the environment variable (ranker_id) could not be set in the production environment, hence this approach.
You can see in the documentation here all of the examples of how to use Retrieve and Rank - IBM Watson.
In this case the username and password come from the Service Credentials.
One example of search and rank:
var watson = require('watson-developer-cloud');
var retrieve_and_rank = watson.retrieve_and_rank({
username: '{username}',
password: '{password}',
version: 'v1'
});
var params = {
cluster_id: 'sc1ca23733_faa8_49ce_b3b6_dc3e193264c6',
collection_name: 'example_collection'
};
// Use a querystring parser to encode output.
var qs = require('qs');
// Get a Solr client for indexing and searching documents.
// See https://github.com/watson-developer-cloud/node-sdk/blob/master/services/retrieve_and_rank/v1.js
solrClient = retrieve_and_rank.createSolrClient(params);
var ranker_id = 'B2E325-rank-67'; //PASTE YOUR RANKER_ID
var question = 'what is the basic mechanism of the transonic aileron buzz';
var query = qs.stringify({q: question, ranker_id: ranker_id, fl: 'id,title'});
solrClient.get('fcselect', query, function(err, searchResponse) {
if(err) {
console.log('Error searching for documents: ' + err);
}
else {
console.log(JSON.stringify(searchResponse.response.docs, null, 2));
}
});
Here is one example of how to get information about a ranker:
var watson = require('watson-developer-cloud');
var retrieve_and_rank = watson.retrieve_and_rank({
username: '{username}', //username from Service Credentials Retrieve and Rank
password: '{password}', // password from Service Credentials Retrieve and Rank
version: 'v1'
});
var params = {
ranker_id: 'B2E325-rank-67', //PASTE YOUR RANKER_ID
};
retrieve_and_rank.rankerStatus(params,
function(err, response) {
if (err)
console.log('error:', err);
else
console.log(JSON.stringify(response, null, 2));
});
Example Index documents:
//require watson
var watson = require('watson-developer-cloud');
//call with your password and username from Service Retrieve and Rank Credentials
var retrieve_and_rank = watson.retrieve_and_rank({
username: '{username}',
password: '{password}',
version: 'v1'
});
//cluster id from your documentation
var params = {
cluster_id: 'sc1ca23733_faa8_49ce_b3b6_dc3e193264c6',
collection_name: 'example_collection',
};
// your doc here
var doc = {
id: 1,
author: 'brenckman,m.',
bibliography: 'j. ae. scs. 25, 1958, 324.',
body: 'experimental investigation of the aerodynamics of a wing in a slipstream. an experimental study of a wing in a propeller slipstream was made in order to determine the spanwise distribution of the lift increase due to slipstream at different angles of attack of the wing and at different free stream to slipstream velocity ratios.',
title: 'experimental investigation of the aerodynamics of a wing in a slipstream'
};
//Get a Solr client for indexing and searching documents with rankerid inside params variable
solrClient = retrieve_and_rank.createSolrClient(params);
console.log('Indexing a document...');
solrClient.add(doc, function (err, response) {
if (err) {
console.log('Error indexing document: ', err);
}
else {
console.log('Indexed a document.');
solrClient.commit(function(err) {
if(err) {
console.log('Error committing change: ' + err);
}
else {
console.log('Successfully committed changes.');
}
});
}
});
