I am wondering how it might be possible to deploy a Node.js app on Azure Functions. Basically, I have a function set up and running a basic hello world HTTP example that looks like:
module.exports = function (context, req) {
    context.log('JavaScript HTTP trigger function processed a request.');
    context.res = {
        // status: 200, /* Defaults to 200 */
        body: "Hello " + req.params.name
    };
    context.done();
};
The app I am trying to deploy into a function is a simple mock client that uses Swagger (basically it takes a request and returns some XML). The app.js looks like:
const SwaggerExpress = require('swagger-express-mw');
const app = require('express')();
const compression = require('compression');

const configSwagger = {
  appRoot: __dirname, // required config
};

SwaggerExpress.create(configSwagger, (err, swaggerExpress) => {
  if (err) {
    throw err;
  }

  // install middleware
  swaggerExpress.register(app);

  // server configuration
  const serverPort = process.env.PORT || 3001;
  app.listen(serverPort, () => {
    //logger.info('Listening on port %s', serverPort);
  });

  app.use(compression());
});

module.exports = app; // for testing
The thing I am not sure about is how to handle module.exports = app when module.exports is used to establish the function (i.e. module.exports = function (context, req)).
You can try azure-function-express to enable your Swagger middleware.
Note that certain middleware will not function correctly (for example, body-parser). This is because the Functions req is not a stream; it is injected into the function with a body property already populated.
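For example, here is a minimal sketch of wiring an Express app through azure-function-express (the route is an assumption; your Express routes must match the route configured for your function):

const createHandler = require('azure-function-express').createHandler;
const express = require('express');

const app = express();

// Register routes and middleware on the Express app as usual
app.get('/api/hello/:name', (req, res) => {
  res.send('Hello ' + req.params.name);
});

// Export a handler instead of the Express app itself:
// it has the (context, req) signature Azure Functions expects
module.exports = createHandler(app);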
I'm currently using Glitch's (glitch.com) Node.js to connect Dialogflow to my code, and I'm running into a problem. As you can see below, I have two intents I'm trying to pass values to: characterHandler and openHandler.
Now the weird thing is that the webhook does execute correctly if I trigger the intent on Dialogflow corresponding to characterHandler, but it returns "UnhandledPromiseRejectionWarning: Error: no matching intent handler for: null" in the console and fails when triggering openHandler, and I have no clue why.
Does anyone know what I'm doing wrong?
'use strict';

process.env.DEBUG = 'actions-on-google:*';

const express = require('express');
const bodyParser = require('body-parser');
const request = require("request");
const { DialogflowApp } = require('actions-on-google');
const Map = require('es6-map');

const app = express();
app.use(bodyParser.json());

let characters = ['The Pied Piper', 'Red Riding Hood', 'The Big Bad Wolf'];

// [START Action]
app.post('/', function (request, response) {
  const assistant = new DialogflowApp({request, response});
  console.log('Request headers: ' + JSON.stringify(request.headers));
  console.log('Request body: ' + JSON.stringify(request.body));

  const CHARACTERS = 'story.characters';
  const OPENINGTIMES = 'openingTimes';

  function characterHandler (assistant) {
    // Append the character so the "How about" prefix isn't overwritten
    let responseText = "How about " +
      characters[Math.floor(Math.random() * characters.length)];
    assistant.tell(responseText);
  }

  function openHandler (assistant) {
    assistant.tell('This attraction is currently full');
  }

  const actionMap = new Map();
  actionMap.set(CHARACTERS, characterHandler);
  actionMap.set(OPENINGTIMES, openHandler);
  assistant.handleRequest(actionMap);
});
// [END Action]

// Renders the homepage
app.get('/', function (req, res) {
  res.writeHead(200, {'Content-Type': 'text/html'});
  res.write('');
  res.end();
});

if (module === require.main) {
  // [START server]
  // Start the server
  let server = app.listen(process.env.PORT || 3000, function () {
    let port = server.address().port;
    console.log('App listening on port %s', port);
  });
  // [END server]
}

module.exports = app;
Your openHandler function is mapped to 'openingTimes'. Make sure that exactly matches the action name set on the intent in the Dialogflow console, and make sure the intent was actually saved correctly.
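For instance, in your action map the key must be character-for-character identical to the Action field of the intent (the names here are taken from your code):

// The string passed to actionMap.set() must exactly match the
// "Action" field of the intent in the Dialogflow console
const OPENINGTIMES = 'openingTimes'; // not e.g. 'OpeningTimes' or 'opening.times'
actionMap.set(OPENINGTIMES, openHandler);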
I'm trying to figure out how to use Dialogflow with express/body-parser and the Node.js client library v2 functions without Firebase (on my own server). I have it working with the raw request/response JSON data, but I can't figure out what I need to do to use the library's dialogflow() function. Here's a snippet of what I have that's working with the JSON data:
const {config} = require('./config');
const https = require('https');
const express = require('express');
const bodyParser = require('body-parser');
const fs = require('fs');

const options = {
  key: fs.readFileSync(config.SSLDIR + 'privkey.pem'),
  cert: fs.readFileSync(config.SSLDIR + 'cert.pem'),
  ca: fs.readFileSync(config.SSLDIR + 'chain.pem')
};

const eapp = express();
eapp.disable('x-powered-by');
eapp.use(bodyParser.urlencoded({extended: true}));
eapp.use(bodyParser.json());

const server = https.createServer(options, eapp).listen(config.LISTEN_PORT, () => {
  console.log(`API listening on port ${config.LISTEN_PORT}. Ctrl-C to end.`);
});

server.on('error', (e) => {
  console.log(`Can't start server! Error is ${e}`);
  process.exit();
});

// I have an Agent class that reads the request object and handles it
eapp.post("/actions", (request, response) => {
  const agent = new Agent(request, response);
  agent.run();
  return;
});

eapp.all('*', (request, response) => {
  console.log("Invalid Access");
  response.sendStatus(404);
});
The only solution posted online that I could find said to use the following code:
const express = require('express');
const bodyParser = require('body-parser');
const { dialogflow } = require('actions-on-google');
const app = dialogflow();
express().use(bodyParser.json(), app).listen(3000);
But I'm confused about:
Dialogflow fulfillment requires an HTTPS endpoint, so don't I have to create an HTTPS server like I did?
How can I integrate this example into what I've already done, to stop using the JSON data and start using the Node.js functions from app = dialogflow() in the library?
The app instance created using the dialogflow function can be used like an Express request handler function. Thus, you can call it with the Express request and response objects to handle the request.
In the run function for your Agent class, you can do something like
run() {
  const request = ...; // Express request object
  const response = ...; // Express response object
  const app = ...; // app instance created using the dialogflow function
  app(request, response); // call app with the Express objects
}
Then, once you've deployed this server to a public HTTPS endpoint, you can set the fulfillment URL in Dialogflow to something like https://subdomain.domain.tld/actions, where /actions is the POST endpoint you listened on in the code.
In the end, it was very simple. I just needed to mount the Dialogflow app alongside bodyParser:
eapp.use(bodyParser.json(), app);
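Put together with the HTTPS server from the question, the wiring looks roughly like this (a minimal sketch; the intent name is an assumption):

const { dialogflow } = require('actions-on-google');
const app = dialogflow();

// Intent handlers registered on the Dialogflow app replace the manual JSON handling
app.intent('Default Welcome Intent', (conv) => {
  conv.ask('Welcome!');
});

// The Dialogflow app behaves like an Express request handler,
// so it can be mounted after bodyParser on the existing server
eapp.use(bodyParser.json(), app);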
I want to pull a URL from the DB and use it as the proxied URL. However, the setup I've come up with initializes a new BrowserSync server for each URL, using incrementing port numbers.
Is there a way to accomplish this without initializing a new BrowserSync server every time?
Or should I be using another approach?
var bs = require("browser-sync");
var express = require("express");
var router = express.Router();
var app = express();

router.get("/", function(req, res){
  var proxyUrl = getUrl(); // get url from db (www.example.com)
  bs.create("bs1").init({
    notify: false,
    open: false,
    ui: false,
    port: 10000,
    proxy: proxyUrl
  });
  res.send();
});

app.use(router);

app.listen(8080, function(){
  console.log('listening on *:8080');
});
The above is fine(ish), but is it good practice to initialize a new server for every URL (potentially thousands)?
And is it safe to expose a new port number to every user of the system? (Can I mask this with a subdomain?)
Update
My end goal is to use a unique subdomain to refer to each proxy url.
For example:
sub1.mysite.com proxies www.example.com,
sub2.mysite.com proxies www.example2.com
BrowserSync will not work here, as its proxy is tied to the server setup.
I used the following packages:
express
express-http-proxy
vhost (express vhost)
const port = 8080;
var app = require('express')();
var proxy = require('express-http-proxy');
var url = require('url');
var vhost = require('vhost');

app.listen(port);

/* Assuming getUrl() will return an array of sites */
// var sites = getUrl();
// DO NOT put '/' at the end of site
var sites = [
  'http://www.bing.com',
  'http://samanthagooden.com',
  'http://www.courtleigh.com'
];

var i = 0;
sites.forEach(site => {
  i++;
  var subDomain = 'sub' + i + '.mysite.com';
  app.use(vhost(subDomain, proxy(site, {
    forwardPath: (req, res) => url.parse(req.url).path,
    intercept: (rsp, data, req, res, callback) => {
      if (res._headers['content-type']) {
        var contentType = res._headers['content-type'];
        if (
          contentType.indexOf('text') !== -1 ||
          contentType.indexOf('javascript') !== -1
        ) {
          // Replace link if content-type = text or javascript
          var reg = new RegExp(site, 'g');
          res.send(data.toString().replace(reg, ''));
        } else {
          res.send(data);
        }
      } else {
        res.send(data);
      }
    }
  })));
  console.log(subDomain + ':' + port + ' proxy: ' + site);
});
The above example will create the following proxies:
sub1.mysite.com:8080 proxy: http://www.bing.com
sub2.mysite.com:8080 proxy: http://samanthagooden.com
sub3.mysite.com:8080 proxy: http://www.courtleigh.com
Maybe I'm misunderstanding what you are trying to do, but BrowserSync and Express seem a bit overkill in this case; why not just use node-http-proxy with the native http module?
var http = require('http')
var httpProxy = require('http-proxy')

var options = ...
var proxy = httpProxy.createProxyServer(options)

var server = http.createServer(function (req, res) {
  var proxyUrl = getUrl()
  proxy.web(req, res, { target: proxyUrl })
})

server.listen(8080, function () {
  console.log('listening on *:8080')
})
In my opinion, if you want to offer a SaaS service, using a proxy is not a good way to go: a proxy for each client would mean a new process with a new port. My solution is to create a single Node server listening on localhost and map *.domain.com to that server.
If you are using an individual database for each client:
In the Node logic, get the subdomain from the request host and use that reference to connect to the client's database.
The final controller code would be:
var express = require('express');
var router = express.Router();
var MongoClient = require('mongodb').MongoClient;

/* GET home page. */
router.get('/', function(req, res, next) {
  var client = req.subdomains[0];
  console.log(client);
  MongoClient.connect('mongodb://localhost:27017/' + client, function(err, db) {
    if (err) {
      throw err;
    }
    db.collection('app1').find().toArray(function(err, result) {
      if (err) {
        throw err;
      }
      console.log('data');
      console.log(result);
    });
  });
  res.render('index', { title: 'Express' });
});

module.exports = router;
In the future, if you get more clients, you can scale out with the Node cluster module or a standard Ubuntu cluster behind a web service.
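A minimal sketch of the Node cluster approach (./app is a hypothetical entry point that calls app.listen()):

const cluster = require('cluster');
const os = require('os');

if (cluster.isMaster) {
  // Fork one worker per CPU core; the workers share the listen port
  os.cpus().forEach(() => cluster.fork());
} else {
  require('./app'); // hypothetical module that starts the Express server
}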
Here is my test file:
var request = require('superagent'),
    expect = require('expect'),
    sinon = require('sinon'),
    app = require("../../../../app");

request = request(app);

describe("HealthCheck", function () {
  describe("/health_check", function () {
    it("should return a 200 status", function (done) {
      request
        .get("http://localhost:3000/health_check")
        .end(function(err, res) {
          expect(res).to.exist;
          expect(res.status).to.equal(200);
          expect(res.body).to.contain('OK');
          return done();
        });
    });
  });
});
The error I keep running into is:
1) HealthCheck /health_check should return a 200 status:
TypeError: Cannot call method 'end' of undefined
I can't figure out what I am doing wrong.
Here is my app.js:
require('coffee-script/register');

var express = require('express'),
    config = require('./config/config'),
    fs = require('fs');

var app = express();

require('./config/express')(app, config);

app.listen(config.port);

exports.app = app;
You're exporting app as a named property, app.
Therefore, when you require the module, you'll need to reference the name of the property you're exporting, in this case app.
So, you can either change your test:
var request = require('superagent'),
    expect = require('expect'),
    sinon = require('sinon'),
    app = require("../../../../app");

request = request(app.app);
Or change your app.js to export the app directly:
Change
exports.app = app;
to
module.exports = app;
There's a good article on this here: http://openmymind.net/2012/2/3/Node-Require-and-Exports/
I need help in trying to solve this scenario.
I have a file web.js. Over there I have:
var express = require("express");
var app = express();
var web2 = require("./web2");

/* Code to start the server on the required port */

app.get('/param1', function(req, res){
  console.log("INSIDE GET METHOD OF WEB.JS");
});

module.exports.app = app;
I have another file web2.js. Over there I have:
var web = require("./web");
app = web.app;

app.get('/param2', function(req, res){
  console.log("INSIDE GET METHOD OF WEB2.JS");
});
While starting, I get an error:
TypeError: Cannot call method 'post' of undefined
If I remove line 3 from web.js, I am able to start the server, but a request for http:///param2 gives a 404.
Updated scenario:
I am using a pg database, and I try to create a client that keeps an instance of the connection (in web.js). I then pass this to the other file (web2.js). In web2.js I always get this client as null.
In web.js I have the following code:
var pg = require("pg");
var pgclient;

app.get('*', function(req, res, next){
  pg.connect(process.env.DATABASE_URL, function(err, client, done) {
    if (client != null) {
      pgclient = client;
      console.log("Client connection with Postgres DB is established");
      next();
    }
  });
});
require("./web2.js")(app, pgclient);
In web2.js, I have the following code:
module.exports = function(app, pgclient){
  app.get('/param1', function(req, res){
    if (pgclient != null) {
    }
    else {
      res.send(500, "pgclient is NULL");
    }
  });
}
The code never reaches the if block (if (pgclient != null)) in web2.js.
The problem is the cyclic dependency between web.js and web2.js: when web2.js requires web.js, web.js's module.exports hasn't been set yet. (The updated scenario has a second problem: require("./web2.js")(app, pgclient) runs while pgclient is still undefined, and because the value is passed in at call time, assigning pgclient later inside the pg.connect callback doesn't change what web2.js received.) I would rather do something like this:
web.js

var express = require("express");
var app = express();

app.get("/param1", function (req, res) {
  // ...
});

require("./web2")(app);

app.listen(/* port number */);
web2.js

module.exports = function (app) {
  app.get("/param2", function (req, res) {
    // ...
  });
};