I am trying to integrate IBM Watson bot with twilio, whatsapp using IBM cloud functions using Nodejs. I followed the following to come up with this code https://github.com/ChaitanyaGhantasala/Whatsapp/blob/master/app.js
Please find below the Code:
// Import Modules
var express = require('express')
var bodyParser = require('body-parser')
var app = express()
var AssistantV1 = require('ibm-watson/assistant/v1');
const { IamAuthenticator } = require('ibm-watson/auth');
// Twilio Credentials
var accountSid = '';
var authToken = '';
var client = require('twilio')(accountSid, authToken);
app.use(bodyParser.urlencoded({ entended: false }));
var env= require('dotenv').config()
// Watson Credentials
var assistant = new AssistantV1({
version: '2018-09-20',
authenticator: new IamAuthenticator({
apikey: '',
}),
url: '',
});
var context1 = {};
app.get('/test', function (req, res) {
})
// API
app.post('/api', function (req, res) {
console.log("Request Object");
var From = req.body.From;
console.log(From);
assistant.message({
skill_id: '',
input: { 'text': req.body.Body },
context: context1
}, function (err, response) {
if (err)
console.log('error:', err);
else {
context1 = response.context;
var msg = response.output.text[0];
console.log("message", msg);
client.messages
.create({
body: msg,
from:'whatsapp:+14155238886',
to: 'From',
}).then(message = console.log(msg))
.done();
}
})
});
//PORT Listen
app.listen(process.env.PORT||8000, function () {
console.log("Server is running at 8000");
});
This line shows an error
const { IamAuthenticator } = require('ibm-watson/auth');
Also, i have no idea how the integration work
Can you please help me with any resources?
This does not work with me,
I am basing this answer on you not being able to format code. The code you show is basic / simple, yet you have made it undecipherable by not formatting it.
You say your error is on the line:
const { IamAuthenticator } = require('ibm-watson/auth');
This is a simple import of a pre-requisite module, which suggests that you have not imported the module ibm-watson, however, if that were the case then the line above it should also fail.
var AssistantV1 = require('ibm-watson/assistant/v1');
In which case you are hitting a version problem, either in your version of Node.js that is not recognising the const format, or in the version of ibm-watson that is not exporting ibm-watson/auth.
Based on your lack of basic code formatting, I am guessing that it is the version of Node.js. The current version of ibm-watson : 5.6.0 requires "node": ">=10.0.0".
Related
Devs! I created a simple bot for Telegram using JS language that when typing '/ tutorials' a txt list is loaded.
I deployed this bot to Heroku so that it goes online.
The problem is that the bot works for a while and then stops working, I don't know why.
Has anyone had this problem?
//Correction message error on console
process.env.NTBA_FIX_319 = 1;
var express = require('express');
var app = express();
app.set('port', (process.env.PORT || 5000));
const TelegramBot = require('node-telegram-bot-api')
const TOKEN = 'myToken'
const bot = new TelegramBot(TOKEN, { polling: true })
bot.on('message', (msg) => {
const chatId = msg.chat.id;
const text = msg.text;
//read list txt
var fs = require('fs');
try {
var data = fs.readFileSync('lista.txt', 'utf8');
} catch (e) {
console.log('Error:', e.stack);
}
//send message
if (text.includes('/tutoriais')) {
bot.sendMessage(chatId, `Olá, ${msg.chat.first_name}! 😎\nLista de Tutoriais`)
bot.sendMessage(chatId, data);
}
});
I added a webhook to the python file and it ended up working fine
https://github.com/victoraugusto6/bot-Telegram-JS-Heroku
I'm currently using Glitch's(Glitch.com) node.js to connect Dialogflow to code and I'm running into a problem. As you can see below; I have two intents I'm trying to pass values to, characterHandler and openHandler.
Now the weird thing is that it does execute the web hook correctly if I trigger the intent on Dialogflow corresponding to "characterHandler", but it returns "UnhandledPromiseRejectionWarning: Error: no matching intent handler for: null" in the console and fails while triggering "openHandler" and I have no clue why.
Does anyone know what I'm doing wrong?
'use strict';
process.env.DEBUG = 'actions-on-google:*';
const express = require('express');
const bodyParser = require('body-parser');
const request = require("request");
const { DialogflowApp } = require('actions-on-google');
const Map = require('es6-map');
const app = express();
app.use(bodyParser.json());
let characters = ['The Pied Piper', 'Red Riding Hood', 'The Big Bad Wolf'];
// [START Action]
app.post('/', function (request, response) {
const assistant = new DialogflowApp({request, response});
console.log('Request headers: ' + JSON.stringify(request.headers));
console.log('Request body: ' + JSON.stringify(request.body));
const CHARACTERS = 'story.characters';
const OPENINGTIMES = 'openingTimes';
function characterHandler (assistant) {
let responseText = "How about";
responseText = characters[Math.floor(Math.random() * characters.length)];
assistant.tell(responseText);
}
function openHandler (assistant) {
assistant.tell('This attraction is currently full');
}
const actionMap = new Map();
actionMap.set(CHARACTERS, characterHandler);
actionMap.set(OPENINGTIMES, openHandler);
assistant.handleRequest(actionMap);
});
// [END Action]
// Renders the homepage
app.get('/', function (req, res) {
res.writeHead(200, {'Content-Type': 'text/html'});
res.write('');
res.end();
});
if (module === require.main) {
// [START server]
// Start the server
let server = app.listen(process.env.PORT || 3000, function () {
let port = server.address().port;
console.log('App listening on port %s', port);
});
// [END server]
}
module.exports = app;
Your open handler function is mapped to 'openingTimes'. Make sure that exactly matches the intent name and make sure that the intent was actually saved correctly.
I'm currently working on a Phonegap app, and I would like users to be able to upload any file to my NodeJS server.
I've looking all around the web but I just can't get anything to work...
Here is the code I'm using for the Phonegap controller:
$scope.open = function()
{
navigator.camera.getPicture(upload,
function(message)
{
alert('get picture failed');
},
{
quality: 50,
destinationType: navigator.camera.PictureSourceType.FILE_URI,
sourceType: navigator.camera.PictureSourceType.PHOTOLIBRARY,
mediaType: navigator.camera.MediaType.ALLMEDIA
});
}
var win = function (r) {
$scope.log = "Code = " + r.responseCode;
$scope.log2 = "Response = " + r.response;
$scope.log3 = "Sent = " + r.bytesSent;
$scope.$digest();
}
var fail = function (error) {
$scope.log = "An error has occurred: Code = " + error.code;
$scope.log2 = "upload error source " + error.source;
$scope.log3 = "upload error target " + error.target;
$scope.$digest();
}
function upload(fileURI)
{
$scope.log = fileURI;
$scope.$digest();
var options = new FileUploadOptions();
options.fileKey = "file";
options.fileName = fileURI.substr(fileURI.lastIndexOf('/') + 1);
options.mimeType = "text/plain";
options.chunkedMode = false;
var params = {};
params.value1 = "test";
params.value2 = "param";
options.params = params;
var ft = new FileTransfer();
ft.upload(fileURI, "http://192.168.192.111:2999/upload", win, fail, options);
}.
Here is the current code for the NodeJS server, have tried a lot of different things, all without success:
var express = require('express');
var http = require('http').Server(express);
var io = require('socket.io')(http);
var fs = require('fs');
var multer = require('multer');
var app = new express();
app.post('/upload', multer({dest: './uploads/'}).single('upl'), function(req, res)
{
console.log(req.body);
console.log(req.file);
})
http.listen(2999, function(){
console.log('listening on *:2999');
});
In the app I used to get errors that FileUploadOptions etc weren't defined, but I fixed that by adding them to the cordova project.
Furthermore, I use ionic 1, if that helps anyone out.
I do keep constantly getting the error code 1 (upload error source), even though I selected a real file and I saw that the link was correct (something like /storage/0/emulated/Downloads on my Android device).
Also, sometimes it gives me error 3 as well (upload target source), some sort of server not found issue I think.
Is there something obvious I'm doing wrong and how would I be able to fix it? Is there a handier way, since I eventually want to link this to a MySQL database.
Thanks in advance!
Well found my answer (a while ago, this is for people stumbling across this post).
You can first try whether your JS works by changing the server to https://posttestserver.com/post.php. If you see an upload appearing there, there's a problem with the server.
The problem with me was that I didn't let Apache through the firewall at all, so uploads from anything besides my PC would fail...
var express=require('express');
var bodyParser=require('body-parser');
var formidable = require('formidable');
var util = require('util');
var app=express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
var path=require('path');
var mysql =require('mysql');
var fs=require('fs');
app.use('/public',express.static(path.join(__dirname, 'public')));
var connection = mysql.createConnection({
host: 'localhost',
user: 'root',
password : '',
port : 3306, //port mysql
database:'xxxxx'
});
app.post('/data', function(req, res) {
// create an incoming form object
var form = new formidable.IncomingForm(),
files = [],
fields = [];
// specify that we want to allow the user to upload multiple files in a single request
form.multiples = true;
// store all uploads in the /uploads directory
form.uploadDir = path.join(__dirname, '/public/images/uploads');
// every time a file has been uploaded successfully,
// rename it to it's orignal name
form.on('file', function(field, file) {
if (path.extname(file.name)=='') {
extension='.jpg';
}
else{
extension=path.extname(file.name);
}
var oldpath = file.path;
form.uploadDir = path.basename(file.name,extension).concat((Math.random()* 100),extension);
var newpath = './public/images/uploads/'+ path.basename(file.name,extension).concat((Math.random()* 100),extension);
//fs.rename(file.path, path.join(form.uploadDir, file.name));
fs.rename(oldpath, newpath);
});
form.on('field', function(field, value) {
fields[field] = value;
});
// log any errors that occur
form.on('error', function(err) {
console.log('An error has occured: \n' + err);
});
// once all the files have been uploaded, send a response to the client
//Call back at the end of the form.
form.on('end', function () {
res.writeHead(200, {
'content-type': 'text/plain'
});
res.write('received the data:\n\n');
// console.log(fields.name+'-'+fields.nickname);
var values={
name:fields.name,
nickname:fields.nickname,
email:fields.email,
password:fields.password,
dob:fields.dob,
gender:fields.gender,
phoneno:fields.phone
};
connection.query('INSERT INTO users SET ?', values, function(err,req,res){
if(err){
console.log('Connection result error '+err);
}
else{
console.log('Success');
}
});
res.end();
});
// parse the incoming request containing the form data
form.parse(req);
});
//app.use(app.router);
app.listen(5000);
I am wondering about how it might be possible to deploy a Node.js app on Azure Functions.
Basically, I have a function setup and running a basic hello world http example that looks like:
module.exports = function (context, req) {
context.log('JavaScript HTTP trigger function processed a request.');
context.res = {
// status: 200, /* Defaults to 200 */
body: "Hello " + req.params.name
};
context.done();
};
The app I am trying to deploy into a function is a simple moc client that uses swagger (basically takes a request and returns some xml). The app.js looks like:
const SwaggerExpress = require('swagger-express-mw');
const app = require('express')();
const compression = require('compression');
const configSwagger = {
appRoot: __dirname, // required config
};
SwaggerExpress.create(configSwagger, (err, swaggerExpress) => {
if (err) {
throw err;
}
// install middleware
swaggerExpress.register(app);
// server configuration
const serverPort = process.env.PORT || 3001;
app.listen(serverPort, () => {
//logger.info('Listening on port %s', serverPort);
});
app.use(compression());
});
module.exports = app; // for testing
The thing I am not sure about is how to handle module.exports = app when modeul.exports is used to establish the function (i.e. module.exports = function (context, req))
You can try to use azure-function-express to enable your swagger middleware.
Note that certain middleware will not function correctly (for example, body-parser). This is because the functions req is not a stream - it is injected into the function with a 'body' property already populated.
I need to upload a file to s3 from koa, I'm pretty new to koa and probably missing something obvious. It actually does complete with 200, but the file never shows up on s3.
Here is a fragment from my app.js:
'use strict';
var jwt = require('koa-jwt');
var bodyParser = require('koa-bodyparser');
var koaBody = require('koa-body');
const app = module.exports = require('koa')()
.use(koaBody({multipart:true})) // this is to pase only multipart forms
.use(require('./routes/common'))
.use(require('./routes/auth'))
.use(require('./routes/users_public'))
.use(jwt({ secret: SECRET }))
//protected routes below this line
.use(require('./routes/subcontractors_private'))
.listen(process.env.PORT || 3000);
subcontractors_private.js looks like this:
'use strict';
var AWS = require('aws-sdk');
var fs = require('fs');
var zlib = require('zlib');
var S3_BUCKET = require('../consts').S3_BUCKET;
var S3_OPTIONS = require('../consts').S3_OPTIONS;
module.exports = require('koa-router')()
.post('/subcontractors/:subcontractor_id/coi', function *(next) {
var body = JSON.stringify(this.request.body, null, 2)
let subcontractor_id = this.params.subcontractor_id;
var file = this.request.body.files.coi.path;
var body = fs.createReadStream(file).pipe(zlib.createGzip());
var s3obj = new AWS.S3(
{params:
{
Bucket: 'coi-test',
Key: 'i/' + subcontractor_id + '.png.zgip'
}
});
s3obj.upload({Body: body})
.on('httpUploadProgress', function(evt) {
console.log(evt);
})
.send(function(err, data) {
console.log(err, data);
});
this.response.status = 200;
this.body = { "result": "subcontractor CIO successfully uploaded"};
})
.routes();
And finally, fragments of the subcontractor_private.js from the test folder:
'use strict';
const supertest = require('co-supertest'); // SuperAgent-driven library for testing HTTP servers
const expect = require('chai').expect; // BDD/TDD assertion library
require('co-mocha'); // enable support for generators in mocha tests using co
var uuid = require('uuid');
var db = require('../../consts').DB;
var moment = require('moment');
const app = require('../../app.js');
const request = supertest.agent(app.listen());
var assert = require('assert');
describe('/subcontractors private routes testing', function() {
it.only('should be able to upload COI for subcontractor', function*() {
const coi_expires_at = moment().add(1, 'd').format();
const response =
yield request.post('/subcontractors')
.set('Content-Type', 'application/json')
.set('Authorization', 'Bearer ' + token)
.send({name: "Joe Doh"})
.end();
//now try to upload the coi file
const response1 =
yield request.post('/subcontractors/' + response.body.subcontractor.id + "/coi")
.set('Authorization', 'Bearer ' + token)
.field('Content-Type', 'multipart/form-data')
.field('coi_expires_at', coi_expires_at)
.attach('coi', './assets/logo-big.png')
.end();
expect(response1.status).to.equal(200, response1.text);
expect(response1.body).to.be.an('object');
expect(response1.body).to.be.json;
expect(response1.body).to.contain.keys('result');
expect(response1.body.result).to.equal('subcontractor CIO successfully uploaded');
});
});
I tried the upload code as a standalone js file (ran via node) and it works fine. But when I run it as a node app from mocha test -- the method completes with response 200 and never finishes the upload. What am I doing wrong?
The problem is with asynchronous nature of this stuff. The test finishes w/o waiting for the request to finish (or even start properly).
I am not entirely clear how to do it correctly, but adding this:
it.only('should be able to upload COI for subcontractor', function*(done) {
(done callback argument) makes the test wait for the "done" callback to be invoked, thus allowing aws sdk to finish the request. I am not sure how to fix your test, however, because the this is twice-asynchronous. The controller method in your koa server returns asynchronously w/o waiting for S3 request to finish, so the mocha test has no way of waiting for S3 processing to complete.
(also see my related question:
aws-sdk s3 upload not working from mocha test)