Supporting POST in grunt-serve using connect-rest?

I'm trying to use grunt serve with the connect-rest middleware. I tried to configure connect-rest in my Gruntfile.js, which worked for GET requests but not for POST. I wonder what I'm missing. This is my Gruntfile.js (excerpt):
module.exports = function (grunt) {
  var postResponder = function(request, content, callback) {
    callback(null, {Status: 42});
  };
  var Rest = require('connect-rest');
  var rest = Rest.create({ context: '/' });
  rest.assign(['get'], '/*', postResponder);
  ...
  livereload: {
    options: {
      open: true,
      middleware: function (connect) {
        return [
          connect().use(rest.processRequest()),
          connect.static('.tmp'),
          connect().use('/bower_components', connect.static('./bower_components')),
          connect().use('/app/styles', connect.static('./app/styles')),
          connect.static(appConfig.app)
        ];
      }
    }
  },
That works like a charm. But when I change that one line to:
rest.assign(['post'],'/*', postResponder);
or
rest.assign('*','/*', postResponder);
Then on posting I get the following entry in the log (running grunt --debug):
[D] server POST /api/groups.json?cache=false&force=true&requesttime=2015-10-29T06:46:24.443Z 404 103 - 2.027 ms
and a 404 error when posting (GET works).
What am I missing?

It turned out that for my purpose - being able to mock POSTs of JSON - I didn't need connect-rest in the first place. The built-in capabilities of connect provided all I needed. So the start of the Gruntfile.js now looks like this:
module.exports = function (grunt) {
  var bodyParser = require("body-parser");
  var postResponder = function(request, response, next) {
    if (request.method === 'POST') {
      console.log(request.method + " " + request.url);
      response.setHeader('Content-Type', 'application/json');
      response.statusCode = 200;
      response.end(JSON.stringify(request.body));
    } else {
      next();
    }
  };
and the livereload section like this:
livereload: {
  options: {
    open: true,
    middleware: function (connect) {
      return [
        connect().use('/api', bodyParser.json()),
        connect().use('/api', postResponder),
        connect.static('.tmp'),
        connect().use('/bower_components', connect.static('./bower_components')),
        connect().use('/app/styles', connect.static('./app/styles')),
        connect.static(appConfig.app)
      ];
    }
  }
}
Note: I limit postbacks to calls to /api here - you might need different endpoints. The example simply echoes back the JSON received.
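To sanity-check the mock from the browser console, a POST like the following should come straight back as JSON (the /api/echo path is just a hypothetical example; with the config above any path under /api is echoed):

fetch('/api/echo', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ hello: 'world' })
})
  .then(function (res) { return res.json(); })
  .then(function (data) { console.log(data); }); // logs { hello: 'world' }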

Related

Can Webpack Dev server create files in my project root?

I have a project set up and running with Webpack 5.28.0 and webpack-dev-server 4.11.1.
It's all working nicely, but I would like to be able to have the dev server write some files back to my project root. These are debug/log files that I'd like to save as JSON.
I'd also like this to be automatic; I don't want to have to click anything or trigger the action manually.
So the ideal flow would be that I run npm start, my build kicks off in a browser, the page generates a load of log data, and this is then written back to my project root, either using some browser function or calling back to a Node script in my build.
Is this possible with dev-server?
You could set up the dev-server middleware to add an API endpoint that accepts data and writes it to your filesystem:
// webpack.config.js
const { writeFile } = require("node:fs/promises");
const bodyParser = require("body-parser");

module.exports = {
  // ...
  devServer: {
    setupMiddlewares: (middlewares, devServer) => {
      devServer.app?.post(
        "/__log",
        bodyParser.json(),
        async (req, res, next) => {
          try {
            await writeFile(
              "debug-log.json",
              JSON.stringify(req.body, null, 2)
            );
            res.sendStatus(202);
          } catch (err) {
            next(err);
          }
        }
      );
      return middlewares;
    },
  },
};
Then your front-end app needs only to construct the payload and POST it to the dev-server:
const debugData = { /* ... */ };

fetch("/__log", {
  method: "POST",
  body: JSON.stringify(debugData),
  headers: { "content-type": "application/json" },
});

Vaadin-Upload not working with http-proxy-middleware

I have a Node.js / Polymer 1 website. I am using http-proxy-middleware to route API calls (/api/webapi) to my backend API server.
On one of the pages I have a vaadin-upload (v2.3.0) component that sends files to the API. Everything appears to work fine when running on localhost, but when I deploy to our test servers I am experiencing issues: either the upload completes quickly and then sits "processing" for a long time, or it stalls.
Using Postman I have managed to send a file to the API directly and to the proxy server. I have also managed to get the upload component to call the API directly. All these cases work correctly, and output from the API suggests that in all cases the API is receiving/processing data at the same rate. From this I have narrowed it down to an interaction between vaadin-upload and http-proxy-middleware.
Does anyone have experience with this who can help me configure the proxy correctly?
proxy configuration:
const url = require('url');
var hpmproxy = require('http-proxy-middleware');
var config = require('../config');

// Adds user authorization token from passport to request
var addAuthTokenMiddleware = function (req, res, next) {
  if (req.session && req.isAuthenticated()) {
    req.headers['authorization'] = 'Bearer ' + req.user.token;
    next();
  } else {
    req.abort();
  }
};

function isLoggedIn(req, res, next) {
  // if user is authenticated in the session, carry on
  if (req.session && req.isAuthenticated())
    return next();
  res.status(403).end();
}

function restream(proxyReq, req) {
  if (isMultipartRequest(req))
    console.log('Multipart');
  if (!isEmpty(req.body)) {
    console.log("parse");
    var bodyData = JSON.stringify(req.body);
    proxyReq.setHeader('Content-Type', 'application/json');
    proxyReq.setHeader('Content-Length', Buffer.byteLength(bodyData));
    proxyReq.write(bodyData);
  }
  console.log("-->[proxyReq]----", proxyReq.path, proxyReq.getHeader('Content-Type'));
}

function handleResponse(proxyRes, req, res) {
  console.log('---[proxyRes]<---', proxyRes.req.method, proxyRes.req.path, proxyRes.statusCode);
}

function isMultipartRequest(req) {
  let contentTypeHeader = req.headers['content-type'];
  return contentTypeHeader && contentTypeHeader.indexOf('multipart') > -1;
}

function isEmpty(obj) {
  for (var prop in obj) {
    if (obj.hasOwnProperty(prop))
      return false;
  }
  return JSON.stringify(obj) === JSON.stringify({});
}

var options = {
  target: config.webApiHost,
  changeOrigin: true, // needed for virtual hosted sites
  pathRewrite: {
    '^/api/webapi/': config.webApiPath
  },
  secure: !config.selfSigned,
  onProxyRes: handleResponse,
  onProxyReq: restream
  // ,logLevel: 'debug'
};

var hpmApiProxy = hpmproxy(options);

module.exports = function (app, passport, config) {
  app.use('/api/webapi/', isLoggedIn, addAuthTokenMiddleware, hpmApiProxy);
  console.log(' WebAPI Proxy Loaded');
}

Jupyterhub Configurable Http Proxy issue

I have been working with Jupyterhub's Configurable Http Proxy and I have been adding the necessary options for the proxy to handle client's ssl certificates without having to use the command line options.
My main goal is that I want to take in a clients request to the proxy and add their certificate information to the header. Once in the header, I will use jupyterhub's authenticator to craft a username.
My issue is that when I use the proxy.on('proxyReq', ...) hook that http-proxy provides to set the header, I get this error: [Error: Can't set headers after they are sent.]
I have been looking all over the code to see where a response/request is being written or sent, but I cannot find it.
Here is the ConfigurableProxy function code, I can give you more if needed:
function ConfigurableProxy (options) {
  var that = this;
  this.options = options || {};
  this.trie = new trie.URLTrie();
  this.auth_token = this.options.auth_token;
  this.includePrefix = options.includePrefix === undefined ? true : options.includePrefix;
  this.routes = {};
  this.host_routing = this.options.host_routing;
  this.error_target = options.error_target;
  if (this.error_target && this.error_target.slice(-1) !== '/') {
    this.error_target = this.error_target + '/'; // ensure trailing /
  }
  this.error_path = options.error_path || path.join(__dirname, 'error');
  if (this.options.default_target) {
    this.add_route('/', {
      target: this.options.default_target
    });
  }
  options.ws = true;
  options.secure = true;
  // These are the ssl options
  options.ssl = {
    // Right now the key and cert are relative paths on my computer,
    // but these can be changed.
    key: fs.readFileSync('/Users/grantherman/Desktop/jupyterHubCSProject/ssl/server.key'),
    cert: fs.readFileSync('/Users/grantherman/Desktop/jupyterHubCSProject/ssl/server.crt'),
    requestCert: true,
    // Right now this is set to false, but if we add a CA to these options
    // and set this to true, the proxy will reject all unknown ssl certs
    rejectUnauthorized: false
  };
  var response = [];
  var data = [];
  var proxy = this.proxy = httpProxy.createProxyServer(options);
  proxy.on('proxyReq', function(proxyReq, req, res, options) {
    console.log("proxy request");
    try {
      proxyReq.setHeader('X-Special-Proxy-Header', req.socket.getPeerCertificate());
    } catch (err) {
      console.log(err);
    }
  });
  proxy.on('data', function(chunk, req, res, options) {
    // renamed from `data` so the outer data array is not shadowed
    data.push(chunk);
  });
  proxy.on('proxyRes', function(proxyRes, req, res, options) {
    response.push(proxyRes);
  });
  proxy.on('error', function(error, req, res, options) {
    log.add(error);
  });
  proxy.on('close', function (req, socket, head) {
    // view disconnected websocket connections
    console.log('Client disconnected');
  });
  // tornado-style regex routing,
  // because cross-language cargo-culting is always a good idea
  this.api_handlers = [
    [ /^\/api\/routes(\/.*)?$/, {
      get: bound(this, authorized(this.get_routes)),
      post: json_handler(bound(this, authorized(this.post_routes))),
      'delete': bound(this, authorized(this.delete_routes))
    } ]
  ];
I think this is going to require modifications to configurable-http-proxy itself. The place to add headers is on the original req object prior to initiating the proxied request, in handle_proxy.
It would look something like:
ConfigurableProxy.prototype.handle_proxy = function (kind, req, res) {
  ...
  req.headers['X-My-Header'] = 'My-Value';
  // dispatch the actual method
  this.proxy[kind].apply(this.proxy, args);
Adding a hook to CHP for modifying the request on its way through should make this doable without modifying the CHP source.
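Until such a hook exists, one workaround (a sketch only; it assumes CHP exposes ConfigurableProxy from lib/configproxy, which is an assumption about the package's internal layout) is to wrap handle_proxy from your own launcher script instead of editing the CHP source:

var configproxy = require('configurable-http-proxy/lib/configproxy');
var originalHandleProxy = configproxy.ConfigurableProxy.prototype.handle_proxy;

configproxy.ConfigurableProxy.prototype.handle_proxy = function (kind, req, res) {
  // req is the original incoming request; a header set here is in place
  // before handle_proxy initiates the proxied request.
  req.headers['X-My-Header'] = 'My-Value';
  return originalHandleProxy.call(this, kind, req, res);
};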

How can I use body-parser with LoopBack?

I see that LoopBack has the Express 3.x middleware built-in. Indeed, body-parser is in loopback/node_modules. But I cannot figure out how to use it as middleware. I have never worked with Express 3.x, so maybe it's just that. require does not work, obviously, unless I install body-parser as a dependency in my project.
What should I do in server.js to use body-parser so that web forms are parsed into req.params? That's what it does, right?
After hours of frustration, I just added it to middleware.json like so:
"parse": {
"body-parser#json": {},
"body-parser#urlencoded": {"params": { "extended": true }}
}
It is installed as a dependency. Now I have form data in req.body in my routes. My server/boot/routes.js looks like this:
module.exports = function(app) {
  app.post('/mailing_list', function(req, res) {
    console.log(req.body.email);
    res.send({"status": 1, "message": "Successfully added to mailing list."});
  });
};
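For a quick check that form parsing works, a urlencoded POST such as the following should log the email and return the status JSON (port 3000 and the email field name are assumptions based on the route above):

fetch('http://localhost:3000/mailing_list', {
  method: 'POST',
  headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
  body: 'email=test%40example.com'
})
  .then(function (res) { return res.json(); })
  .then(function (data) { console.log(data); }); // {"status": 1, ...}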
Just to be more clear about what it takes to get this working (because I still struggled for a while after finding this answer!), here are the steps I took:
As described above, in $APP_HOME/server/middleware.json, add the body-parser to the "parse" section:
{
  "initial:before": {
    "loopback#favicon": {}
  },
  "initial": {
    "compression": {},
    "cors": {
      "params": {
        "origin": true,
        "credentials": true,
        "maxAge": 86400
      }
    }
  },
  "session": {},
  "auth": {},
  "parse": {
    "body-parser#json": {},
    "body-parser#urlencoded": {"params": { "extended": true }}
  },
  "routes": {},
  "files": {},
  "final": {
    "loopback#urlNotFound": {}
  },
  "final:after": {
    "errorhandler": {}
  }
}
Next, I added the parser setup to $APP_HOME/server/server.js:
var loopback = require('loopback');
var bodyParser = require('body-parser');
var multer = require('multer');
var boot = require('loopback-boot');

var app = module.exports = loopback();
app.use(bodyParser.json()); // for parsing application/json
app.use(bodyParser.urlencoded({ extended: true })); // for parsing application/x-www-form-urlencoded
app.use(multer()); // for parsing multipart/form-data

app.start = function() {
...
...
cont'd
Then, since I didn't want to mess with custom routes, I added the following to $APP_HOME/common/models/model.js:
module.exports = function(Model) {
  Model.incoming = function(req, cb) {
    cb(null, 'Hey there, ' + req.body.sender);
  };
  Model.remoteMethod(
    'incoming',
    {
      accepts: [
        { arg: 'req', type: 'object', http: function(ctx) {
            return ctx.req;
          }
        }
      ],
      returns: {arg: 'summary', type: 'string'}
    }
  );
};
I can now run my app with $> slc run .
When I post to the endpoint, it now gets parsed properly, and all is well with the world. I hope this helps someone else!
I'm using loopback 2.14.0:
To make use of the body-parser in your custom bootscript routes you should only need to:
1) install body-parser
npm install body-parser --save
2) Register the module in middleware.json:
"parse": {
"body-parser#json": {},
"body-parser#urlencoded": {"params": { "extended": true }}
},
There is no need to require the parser setup in server.js, loopback does this for you when you register the middleware.
Please note body parser is now installed in your source "node_modules" directory as well as in the loopback modules directory.
If at all possible, try to register custom remote methods as described in the LoopBack documentation.
Registering routes this way gives you access to loopback's body-parser out of the box and is the 'cleanest' implementation.
Based on this answer https://stackoverflow.com/a/29813184/605586 from Ben Carlson you have to
npm install --save body-parser multer
then in your server.js require the modules:
var bodyParser = require('body-parser');
var multer = require('multer');
and use them before app.start:
app.use(bodyParser.json()); // for parsing application/json
app.use(bodyParser.urlencoded({ extended: true })); // for parsing application/x-www-form-urlencoded
app.use(multer().any()); // for parsing multipart/form-data
Then you can create a remote method:
App.incoming = function (req, cb) {
  console.log(req);
  // the files are available as req.files.
  // the body fields are available in req.body
  cb(null, 'Hey there, ' + req.body.sender);
};

App.remoteMethod(
  'incoming',
  {
    accepts: [
      {
        arg: 'req', type: 'object', http: function (ctx) {
          return ctx.req;
        }
      }
    ],
    returns: { arg: 'summary', type: 'string' }
  }
);
Using this you can upload files and additional data fields to loopback with multipart/form-data.
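As a client-side sketch of exercising that remote method (the /api/Apps/incoming path assumes LoopBack's default remote-method routing for a model named App, and fileInput is a hypothetical file input element):

var form = new FormData();
form.append('sender', 'Alice');
form.append('attachment', fileInput.files[0]); // picked up server-side via multer
fetch('/api/Apps/incoming', { method: 'POST', body: form })
  .then(function (res) { return res.json(); })
  .then(function (data) { console.log(data.summary); }); // "Hey there, Alice"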
I'm posting this just for informational purposes. I ran into this same issue and found this works as well. You can add a file in the server/boot/ directory with the following:
var bodyParser = require('body-parser');

module.exports = function(app) {
  app.use(bodyParser.urlencoded({ extended: true }));
};
Of course, you have to install the package by running:
npm install --save body-parser
That will save the package under the node_modules directory.
If you want it to be the first thing to run, you can start the file name with a "0" since these are loaded in alphabetical order.
That being said, I figure it is more 'correct' and elegant to use the middleware configuration approach mentioned above than this one, but I share it in the event someone else finds it useful.
In LoopBack ^3.22.0, it suffices to add

"parse": {
  "body-parser#json": {}
},

to server/middleware.json in order to consume application/json POST bodies in server/boot/routes.js:
module.exports = function(app) {
  app.post('/api/sayhello', function(req, res, next) {
    console.log(req.body);
    res.send(req.body); // respond so the request does not hang
  });
};
One could also use the express framework's built-in parser inside LoopBack, for example for JSON parsing:
app.use(app.loopback.json());
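For placement, a minimal server.js sketch assuming a standard loopback-boot layout (only the app.loopback.json() line comes from the note above; the rest is standard boilerplate):

var loopback = require('loopback');
var boot = require('loopback-boot');

var app = module.exports = loopback();
app.use(app.loopback.json()); // built-in JSON body parsing
boot(app, __dirname);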
My tests gave different results.
1) For json and urlencoded types, there is NO need to add their parsers in middleware.json. I can get data from req.body successfully without adding body-parser#json and body-parser#urlencoded. LoopBack already supports them.
LoopBack-related source code (I think):
1. In the strong-remoting repo, rest-adapter.js uses body-parser for json and urlencoded:
line 35
var json = bodyParser.json;
var urlencoded = bodyParser.urlencoded;
line 315
root.use(urlencoded(urlencodedOptions));
root.use(json(jsonOptions));
2. In remote-object.js:
line 33
require('./rest-adapter');
line 97
RemoteObjects.prototype.handler = function(nameOrClass, options) {
  var Adapter = this.adapter(nameOrClass);
  var adapter = new Adapter(this, options);
  var handler = adapter.createHandler();
  if (handler) {
    // allow adapter reference from handler
    handler.adapter = adapter;
  }
  return handler;
};
2) For the raw type, we can add body-parser#raw to the "parse" section of middleware.json; of course, this requires npm install body-parser.
My test code:
1. My readable stream is from the file uploadRaw.txt; the content is:
GreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaGreenTeaEeeeend
2. middleware.json
"parse": {
"body-parser#raw": {
"paths": [
"/api/v1/Buckets/?/upload"
]
}
},
3.
it('application/octet-stream -- upload non-form', () =>
  new Promise((resolve) => {
    const options = {
      method: 'POST',
      host: testConfig.server.host,
      port: testConfig.server.port,
      path: `${appconfig.restApiRoot}/Buckets/${TEST_CONTAINER}/upload`,
      headers: {
        'Content-Type': 'application/octet-stream',
      },
    };
    const request = http.request(options);
    request.on('error', (e) => {
      logger.debug(`problem with request: ${e.message}`);
    });
    const readStream = fs.createReadStream('tests/resources/uploadRaw.txt');
    readStream.pipe(request);
    resolve();
  }));
4.
Bucket.upload = (req, res, options, cb) => {
  logger.debug('sssssss in uploadFileToContainer');
  fs.writeFile('/Users/caiyufei/TEA/green.txt', req.body, (err) => {
    if (err) {
      logger.debug('oh, failed to write file');
      return;
    }
    logger.debug('green file is saved!');
  });
};
OR
Bucket.upload = (req, res, options, cb) => {
  logger.debug('sssssss in uploadFileToContainer');
  const writeStream = fs.createWriteStream('/Users/caiyufei/TEA/green.txt');
  const streamOptions = {
    highWaterMark: 16384,
    encoding: null,
  };
  streamifier.createReadStream(Buffer.from(req.body), streamOptions).pipe(writeStream);
};
5. package.json
"body-parser": "^1.17.1",
"streamifier": "^0.1.1",

How to store routes in separate files when using Hapi?

All of the Hapi examples (and similar ones in Express) show routes defined in the starting file:
var Hapi = require('hapi');
var server = new Hapi.Server();
server.connection({ port: 8000 });

server.route({
  method: 'GET',
  path: '/',
  handler: function (request, reply) {
    reply('Hello, world!');
  }
});

server.route({
  method: 'GET',
  path: '/{name}',
  handler: function (request, reply) {
    reply('Hello, ' + encodeURIComponent(request.params.name) + '!');
  }
});

server.start(function () {
  console.log('Server running at:', server.info.uri);
});
However, it's not hard to imagine how large this file can grow when implementing a production application with a ton of different routes. Therefore I would like to break down the routes, group them, and store them in separate files, like UserRoutes.js, CartRoutes.js, and then attach them in the main file (add to the server object). How would you suggest separating the routes and then adding them?
You can create a separate file for user routes (config/routes/user.js):
module.exports = [
  { method: 'GET', path: '/users', handler: function () {} },
  { method: 'GET', path: '/users/{id}', handler: function () {} }
];
Similarly with cart. Then create an index file in config/routes (config/routes/index.js):
var cart = require('./cart');
var user = require('./user');
module.exports = [].concat(cart, user);
You can then load this index file in the main file and call server.route():
var routes = require('./config/routes');
...
server.route(routes);
Alternatively, for config/routes/index.js, instead of adding the route files (e.g. cart, user) manually, you can load them dynamically:
const fs = require('fs');

let routes = [];
fs.readdirSync(__dirname)
  .filter(file => file != 'index.js')
  .forEach(file => {
    routes = routes.concat(require(`./${file}`));
  });

module.exports = routes;
You should try Glue plugin: https://github.com/hapijs/glue. It allows you to modularize your application. You can place your routes in separate subdirectories and then include them as Hapi.js plugins. You can also include other plugins (Inert, Vision, Good) with Glue as well as configure your application with a manifest object (or json file).
Quick example:
server.js:
var Hapi = require('hapi');
var Glue = require('glue');

var manifest = {
  connections: [{
    port: 8080
  }],
  plugins: [
    { inert: [{}] },
    { vision: [{}] },
    { './index': null },
    {
      './api': [{
        routes: {
          prefix: '/api/v1'
        }
      }]
    }
  ]
};

var options = {
  relativeTo: __dirname + '/modules'
};

Glue.compose(manifest, options, function (err, server) {
  server.start(function(err) {
    console.log('Server running at: %s://%s:%s', server.info.protocol, server.info.address, server.info.port);
  });
});
./modules/index/index.js:
exports.register = function(server, options, next) {
  server.route({
    method: 'GET',
    path: '/',
    handler: require('./home')
  });
  next(); // signal hapi that the plugin has finished registering
};

exports.register.attributes = {
  pkg: require('./package.json')
};
./modules/index/package.json:
{
  "name": "IndexRoute",
  "version": "1.0.0"
}
./modules/index/home.js:
module.exports = function(req, reply) {
  reply.view('home', { title: 'Awesome' });
};
Have a look at this wonderful article by Dave Stevens for more details and examples.
You can use require-hapiroutes to do some of the organization and loading for you. (I am the author, so I am a little biased; I wrote it to make my life easier in managing routes.)
I am a big fan of require-directory and wanted a way to manage my routes just as easily. This lets you mix and match routes in your modules and modules in directories with routes.
You can then do something like this...
var routes = require('./routes');
server.route(routes.routes);
Then in your directory you could have a route file like...
module.exports = [
  {
    method: 'GET',
    path: '/route1',
    handler: routeHandler1,
    config: {
      description: 'my route description',
      notes: 'Important stuff to know about this route',
      tags: ['app']
    }
  },
  {
    method: 'GET',
    path: '/route2',
    handler: routeHandler2,
    config: {
      description: 'my route description',
      notes: 'Important stuff to know about this route',
      tags: ['app']
    }
  }
];
Or, you can mix and match by assigning to a "routes" property on the module
module.exports.routes = [
  {
    method: 'GET',
    path: '/route1',
    handler: routeHandler1,
    config: {
      description: 'my route description',
      notes: 'Important stuff to know about this route',
      tags: ['app']
    }
  },
  {
    method: 'GET',
    path: '/route2',
    handler: routeHandler2,
    config: {
      description: 'my route description',
      notes: 'Important stuff to know about this route',
      tags: ['app']
    }
  }
];
Always good to have options. There is full documentation on the GitHub or npmjs site for it.
Or you can use an index file to load all the routes in the directory:
index.js
/**
 * Module dependencies.
 */
const fs = require('fs');
const path = require('path');

const basename = path.basename(__filename);
const routes = fs.readdirSync(__dirname)
  .filter((file) => {
    return (file.indexOf('.') !== 0) && (file !== basename);
  })
  .map((file) => {
    return require(path.join(__dirname, file));
  });

module.exports = routes;
Other files in the same directory look like:
module.exports = [
  {
    method: 'POST',
    path: '/api/user',
    config: {
    }
  },
  {
    method: 'PUT',
    path: '/api/user/{userId}', // note: hapi paths must start with a slash
    config: {
    }
  }
];
And then in your root index:
const Routes = require('./src/routes');

/**
 * Add all the routes
 */
for (var route in Routes) {
  server.route(Routes[route]);
}
Interesting to see so many different solutions, here is another one.
Globbing to the rescue
For my latest project I settled on globbing for files with a particular name pattern and then requiring them into the server one by one.
Import routes after having created the server object
// Construct and setup the server object.
// ...

// Require routes.
const Glob = require('glob'); // the `glob` package, which this snippet depends on

Glob.sync('**/*route*.js', { cwd: __dirname }).forEach(function (ith) {
  const route = require('./' + ith);
  if (route.hasOwnProperty('method') && route.hasOwnProperty('path')) {
    console.log('Adding route:', route.method, route.path);
    server.route(route);
  }
});

// Start the server.
// ...
The glob pattern **/*route*.js will find all files within and below the specified current working directory with a name that contains the word route and ends with the suffix .js.
File structure
With the help of globbing we have a loose coupling between the server object and its routes. Just add new route files and they will be included the next time you restart your server.
I like to structure the route files according to their path and naming them with their HTTP-method, like so:
server.js
routes/
  users/
    get-route.js
    patch-route.js
    put-route.js
  articles/
    get-route.js
    patch-route.js
    put-route.js
Example route file routes/users/get-route.js
module.exports = {
  method: 'GET',
  path: '/users',
  config: {
    description: 'Fetch users',
    // ...
  },
  handler: function (request, reply) {
    // ...
  }
};
Final thoughts
Globbing and iterating over files is not a particularly fast process, hence a caching layer may be worth investigating in production builds depending on your circumstances.
Try the hapi-auto-route plugin! It is very simple to use and allows a prefix in your route path.
Full disclosure: I am the author of this plugin.
I know an answer has already been accepted. I put down my solution in case someone wants a quick fix and is new to Hapi.
I also included some npm packages so newbies can see how to use server.register with multiple plugins, in this case good + hapi-auto-route.
Install some npm packages:
npm i -S hapi-auto-route
npm i -S good-console
npm i -S good
// server.js
'use strict';

const Hapi = require('hapi');
const Good = require('good');
const AutoRoute = require('hapi-auto-route');

const server = new Hapi.Server();

server.connection({
  routes: { cors: true },
  port: 3000,
  host: 'localhost',
  labels: ['web']
});

server.register([{
  register: Good,
  options: {
    reporters: {
      console: [{
        module: 'good-squeeze',
        name: 'Squeeze',
        args: [{
          response: '*',
          log: '*'
        }]
      }, {
        module: 'good-console'
      }, 'stdout']
    }
  }
}, {
  register: AutoRoute,
  options: {}
}], (err) => {
  if (err) {
    throw err; // something bad happened loading the plugin
  }
  server.start((err) => {
    if (err) {
      throw err;
    }
    server.log('info', 'Server running at: ' + server.info.uri);
  });
});
In your routes/user.js
module.exports = [
  {
    method: 'GET',
    path: '/',
    handler: (request, reply) => {
      reply('Hello, world!');
    }
  },
  {
    method: 'GET',
    path: '/another',
    handler: (request, reply) => {
      reply('Hello, world again!');
    }
  },
];
Now run: node server.js
Cheers
