ExpressJs server along with command line interface - node.js

I'm developing a REST API server using Express.js, but I would like to allow the developer to issue commands through the command line.
It is a development server that reads a config.json, creates endpoints, and stores data in-memory.
I would like to give the developer the ability to reload the base data without restarting the Express server.
Any idea?
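For reference, the kind of setup described (endpoints generated from config.json, with the data held in memory) might look roughly like the sketch below; the config shape and all names here are assumptions for illustration, not taken from the actual project:
// Illustrative sketch only: a dev server whose endpoints come from config.json.
// Assumed config shape: { "endpoints": [ { "path": "/things", "data": [] } ] }
const express = require("express");
const path = require("path");
const fs = require("fs");

const app = express();
const config = JSON.parse(fs.readFileSync(path.join(__dirname, "config.json"), "utf8"));

const store = {}; // in-memory data, keyed by endpoint path

config.endpoints.forEach(function(ep) {
    store[ep.path] = ep.data; // seed the store from the config file
    app.get(ep.path, function(req, res) {
        res.json(store[ep.path]); // serve whatever is currently in memory
    });
});

app.listen(8000);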

I just thought of what seems to me like a more elegant solution to your problem that you may be interested in: having the server automatically reload the config.json file whenever it changes. This is trivial to implement in Node.js:
const fs = require("fs"),
      path = require("path");

var filePath = path.join(__dirname, "config.json");

fs.watch(filePath, function(event) {
    if (event === "change") {
        // clear the cached copy first, otherwise require() keeps returning
        // the version that was loaded the first time
        delete require.cache[require.resolve(filePath)];
        var file = require(filePath); // require loads JSON as a JS object
        // do something with the newly loaded file here
        console.log("Detected change, config file automatically reloaded.");
    }
});

Perhaps a better option would be to add a route to the app like so, so that a reload can be triggered from any terminal (with curl, for example) rather than wiring an interactive prompt into the server process:
"use strict";
const http = require("http"),
path = require("path"),
express = require("express"),
PORT = 8000;
var app = express();
function loadConfigFile(callback) {
var configFile = false;
try { // load config file if it exists
configFile = require(path.join(__dirname, "config.json"));
} catch(e) { // ignore if the config file doesn't exist
if (e.code !== "MODULE_NOT_FOUND") {
throw e
}
}
callback(configFile); // argument will be false if file does not exist
}
function authenticateAdminUser(req) { return true; } // dummy logic
app.get("/admin/reload", function(req, res, next) {
if (!authenticateAdminUser(req)) {
let err = new Error("Forbidden");
err.StatusCode = 403;
return next(err);
}
loadConfigFile(function(file) {
if (file) {
// do something with the file here
}
res.send("Reloaded config file successfully.");
});
});
loadConfigFile(function(file) {
if (file) {
// do something with the file here
}
app.listen(PORT, function() {
console.log("Server started on port " + PORT + ".");
});
});
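With this route in place, the developer can trigger a reload from a second terminal without restarting the server, for example with a small helper script like the sketch below (it assumes the port and route used above):
// reload.js - run with `node reload.js` while the dev server is running
const http = require("http");

http.get("http://localhost:8000/admin/reload", function(res) {
    res.pipe(process.stdout); // prints the server's confirmation message
});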

Related

Automatic exit - NodeJS https webserver

To elaborate on the question in the title,
I have made a simple JS app that runs on a Node server. I have a thumbdrive that contains a folder and a start.bat file. Start.bat, as the name implies, switches the directory to my server folder and starts the server. Start.bat also starts another process that opens the Edge browser to localhost in kiosk mode. When a user runs start.bat, the app appears on the screen with the server running in the background. When the user exits the Edge browser, they are then required to CTRL + C out of the server cmd prompt to properly shut down the server.
I need a system that effectively shuts down the server automatically after the Edge browser has been closed. I am not sure whether it is possible to tie the closing of the browser to the Node server, and I have yet to find a solution online. If anyone has any ideas regarding possible fixes to my problem, I would love to hear them!
https-server.js
const https = require("https");
const express = require("express");
const path = require("path");
const fs = require("fs");
const ip = require("ip");
const process = require("process");

const app = express();
const port = 443;

process.chdir("..");
console.log("Current working dir: " + process.cwd());
var rootDir = process.cwd();

// determines what folder houses js, css, html, etc. files
app.use(express.static(rootDir + "/public/"), function (req, res, next) {
    const ip = req.ip;
    console.log("Now serving ip:", "\x1b[33m", ip, "\x1b[37m");
    next();
});

// determines which file is the index
app.get("/", function (req, res) {
    res.sendFile(path.join(rootDir + "/public/index.html"));
});

var sslServer = https.createServer(
    {
        key: fs.readFileSync(path.join(rootDir, "certificate", "key.pem")),
        cert: fs.readFileSync(path.join(rootDir, "certificate", "certificate.pem")),
    },
    app
);

// determines which port the app (HTTPS server) should listen on
sslServer.listen(port, function () {
    console.log(
        "Server has successfully started, available on:",
        "\x1b[33m",
        ip.address(),
        "\x1b[37m",
        "listening on port:",
        "\x1b[33m",
        +port,
        "\x1b[37m"
    );
    console.log("CTRL + C to exit server");
    sslServer.close();
});
Will provide any needed information.
Register an endpoint that exits the process:
app.get('/shutdown', (req, res, next) => {
    res.json({ "message": "Received" });
    next();
}, (req, res) => {
    // exit only after the response has actually been flushed to the client
    res.on('finish', () => process.exit());
});
Then register an onbeforeunload listener that makes a request to this endpoint.
let terminateCmdReceived = false;

async function shutdown(e) {
    let response;
    if (!terminateCmdReceived) {
        e.preventDefault();
        try {
            // keepalive lets the request outlive the page being unloaded
            response = await fetch('http://localhost:3000/shutdown', { keepalive: true });
            const json = await response.json();
            if (json.message === "Received") {
                terminateCmdReceived = true;
                window.close();
            }
        } catch (e) {
            console.error("Terminate Command was not received");
        }
    }
}

window.onbeforeunload = shutdown;

Loopback: How to access a model from a script?

I am using Loopback and want to persist data to the database through a script.
I have written a custom command which I will be running through a cronjob:
'use strict';

var loopback = require('loopback');
var app = module.exports = loopback();
var boot = require('loopback-boot');

app.start = function() {
    return app.listen(function() {
        const baseUrl = app.get('url').replace(/\/$/, '');
        console.log('Web server listening at: %s', baseUrl);
        let Dish = app.models.dish;
        console.log(Dish);
    });
};

boot(app, __dirname, function(err) {
    if (err) throw err;
    // start the server if `$ node server.js`
    if (require.main === module)
        app.start();
});
The output I get is:
Web server listening at: http://localhost:3000
undefined
How do I access the dish model?
You're not calling the boot function
https://github.com/strongloop/loopback-boot
The loopback-boot module initializes (bootstraps) a LoopBack application. Specifically, it:
Configures data-sources.
Defines custom models.
Configures models and attaches models to data-sources.
Configures application settings.
Runs additional boot scripts, so you can put custom setup code in multiple small files instead of in the main application file.
Your server.js likely contains something similar to this:
var boot = require('loopback-boot');

app.start = function() {
    return app.listen(function() {
        const baseUrl = app.get('url').replace(/\/$/, '');
        console.log('Web server listening at: %s', baseUrl);
        if (app.get('loopback-component-explorer')) {
            const explorerPath = app.get('loopback-component-explorer').mountPath;
            console.log('Browse your REST API at %s%s', baseUrl, explorerPath);
        }
    });
};

boot(app, __dirname, function(err) {
    if (err) throw err;
    // start the server if `$ node server.js`
    if (require.main === module)
        app.start();
});
You need these to init the app. You might be able to get away with only calling boot, but I think app.start is the one which gets your datasources connected.
Build your command script like this:
let app = require('./server/server'); // set the path according to the location of your command script
app.models.YOUR_MODEL; // access the model
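Applied to the original question, a complete cron command script could look something like the sketch below; the model name Dish, the script location next to the server/ directory, and the data passed to create are all assumptions:
'use strict';

// Sketch of a standalone script run by cron; adjust the require path to your project layout.
// If your boot scripts are asynchronous you may need to wait for the app's 'booted' event first.
const app = require('./server/server');

const Dish = app.models.Dish; // the name defined in common/models/dish.json

Dish.create({ name: 'example' }, function(err, dish) {
    if (err) throw err;
    console.log('Created dish with id %s', dish.id);
    process.exit(0); // exit explicitly so the cronjob does not keep the app alive
});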
You can access your models in LoopBack as below.
First of all, require the server.js file in your current file:
const app = require('YOUR_SERVERJS_FILE_PATH');
const MY_MODEL = app.models.YOUR_MODEL_NAME;
Here, YOUR_MODEL_NAME must match the name value in your YOUR_MODEL.json file.
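For example, if common/models/dish.json declared the model like this (just a sketch), it would be available as app.models.Dish:
{
    "name": "Dish",
    "base": "PersistedModel",
    "properties": {
        "name": { "type": "string" }
    }
}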
Hope you will get my point.
Thank you.

File-upload error using Cordova FileTransfer to NodeJS

I'm currently working on a Phonegap app, and I would like users to be able to upload any file to my NodeJS server.
I've been looking all around the web, but I just can't get anything to work.
Here is the code I'm using for the Phonegap controller:
$scope.open = function() {
    navigator.camera.getPicture(upload,
        function(message) {
            alert('get picture failed');
        },
        {
            quality: 50,
            destinationType: navigator.camera.PictureSourceType.FILE_URI,
            sourceType: navigator.camera.PictureSourceType.PHOTOLIBRARY,
            mediaType: navigator.camera.MediaType.ALLMEDIA
        });
};

var win = function (r) {
    $scope.log = "Code = " + r.responseCode;
    $scope.log2 = "Response = " + r.response;
    $scope.log3 = "Sent = " + r.bytesSent;
    $scope.$digest();
};

var fail = function (error) {
    $scope.log = "An error has occurred: Code = " + error.code;
    $scope.log2 = "upload error source " + error.source;
    $scope.log3 = "upload error target " + error.target;
    $scope.$digest();
};

function upload(fileURI) {
    $scope.log = fileURI;
    $scope.$digest();

    var options = new FileUploadOptions();
    options.fileKey = "file";
    options.fileName = fileURI.substr(fileURI.lastIndexOf('/') + 1);
    options.mimeType = "text/plain";
    options.chunkedMode = false;

    var params = {};
    params.value1 = "test";
    params.value2 = "param";
    options.params = params;

    var ft = new FileTransfer();
    ft.upload(fileURI, "http://192.168.192.111:2999/upload", win, fail, options);
}
Here is the current code for the NodeJS server; I have tried a lot of different things, all without success:
var express = require('express');
var http = require('http').Server(express);
var io = require('socket.io')(http);
var fs = require('fs');
var multer = require('multer');

var app = new express();

app.post('/upload', multer({ dest: './uploads/' }).single('upl'), function(req, res) {
    console.log(req.body);
    console.log(req.file);
});

http.listen(2999, function() {
    console.log('listening on *:2999');
});
In the app I used to get errors that FileUploadOptions etc weren't defined, but I fixed that by adding them to the cordova project.
Furthermore, I use ionic 1, if that helps anyone out.
I constantly keep getting error code 1 (upload error source), even though I selected a real file and saw that the path was correct (something like /storage/0/emulated/Downloads on my Android device).
Also, sometimes I get error code 3 as well (upload error target), which I think is some sort of server-not-found issue.
Is there something obvious I'm doing wrong, and how could I fix it? Is there a handier way to do this, since I eventually want to link it to a MySQL database?
Thanks in advance!
Well, I found my answer a while ago; this is for people stumbling across this post.
You can first check whether your JS works by changing the upload URL to https://posttestserver.com/post.php. If you see an upload appearing there, your client code is fine and the problem lies with your own server.
The problem in my case was that I hadn't let Apache through the firewall at all, so uploads from anything besides my PC would fail...
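For example, the upload call from the controller above can temporarily be pointed at that test endpoint (a sketch; switch the URL back once the client side is confirmed to work):
// Temporarily upload to the public test endpoint instead of the local server
ft.upload(fileURI, encodeURI("https://posttestserver.com/post.php"), win, fail, options);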
Alternatively, here is a server-side setup that uses formidable instead of multer and also writes the submitted fields to MySQL:
var express = require('express');
var bodyParser = require('body-parser');
var formidable = require('formidable');
var util = require('util');
var path = require('path');
var mysql = require('mysql');
var fs = require('fs');

var app = express();
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
app.use('/public', express.static(path.join(__dirname, 'public')));

var connection = mysql.createConnection({
    host: 'localhost',
    user: 'root',
    password: '',
    port: 3306, // MySQL port
    database: 'xxxxx'
});

app.post('/data', function(req, res) {
    // create an incoming form object
    var form = new formidable.IncomingForm(),
        files = {},
        fields = {};

    // allow the user to upload multiple files in a single request
    form.multiples = true;

    // store all uploads in the /public/images/uploads directory
    form.uploadDir = path.join(__dirname, '/public/images/uploads');

    // every time a file has been uploaded successfully,
    // rename it to its original name plus a random suffix to avoid collisions
    form.on('file', function(field, file) {
        var extension = path.extname(file.name) === '' ? '.jpg' : path.extname(file.name);
        var oldpath = file.path;
        var newpath = './public/images/uploads/' +
            path.basename(file.name, extension).concat((Math.random() * 100), extension);
        fs.rename(oldpath, newpath, function(err) {
            if (err) console.log('Rename failed: ' + err);
        });
    });

    form.on('field', function(field, value) {
        fields[field] = value;
    });

    // log any errors that occur
    form.on('error', function(err) {
        console.log('An error has occured: \n' + err);
    });

    // once all the files have been uploaded, send a response to the client
    // (callback at the end of the form)
    form.on('end', function() {
        res.writeHead(200, {
            'content-type': 'text/plain'
        });
        res.write('received the data:\n\n');
        // console.log(fields.name + '-' + fields.nickname);

        var values = {
            name: fields.name,
            nickname: fields.nickname,
            email: fields.email,
            password: fields.password,
            dob: fields.dob,
            gender: fields.gender,
            phoneno: fields.phone
        };

        connection.query('INSERT INTO users SET ?', values, function(err, result) {
            if (err) {
                console.log('Connection result error ' + err);
            } else {
                console.log('Success');
            }
        });

        res.end();
    });

    // parse the incoming request containing the form data
    form.parse(req);
});

app.listen(5000);

not able to export variable (var app = express()) in nodejs using express

I need help trying to solve this scenario.
I have a file web.js. In it I have:
var express = require("express");
var app = express();
var web2 = require("./web2");
/* Code the start the server on the required port*/
app.get('/param1', function(req, res){
console.log("INSIDE GET METHOD OF WEB.JS");
});
module.exports.app = app
I have another file, web2.js. In it I have:
var web = require("./web");
app = web.app;
app.get('/param2', function(req, res){
console.log("INSIDE GET METHOD OF WEB2.JS");
});
While starting I get an error
TypeError: Cannot call method 'post' of undefined
If I remove line 3 from web.js, I am able to start the server, but a request for http:///param2 gives a 404.
Updated scenario:
I am using the pg database module and try to create a client that keeps an instance of the client (in web.js). I then pass this to the other file (web2.js). In web2.js I always get this client as null.
In web.js I have the following code:
var pg = require("pg");
var pgclient;

app.get('*', function(req, res, next) {
    pg.connect(process.env.DATABASE_URL, function(err, client, done) {
        if (client != null) {
            pgclient = client;
            console.log("Client connection with Postgres DB is established");
            next();
        }
    });
});

require("./web2.js")(app, pgclient);
In web2.js I have the following code:
module.exports = function(app, pgclient) {
    app.get('/param1', function(req, res) {
        if (pgclient != null) {
        }
        else {
            res.send(500, "pgclient is NULL");
        }
    });
};
The code never reaches the if block (if (pgclient != null)) in web2.js.
The problem is the cyclic dependency between web.js and web2.js. When web2.js requires web.js, web.js's module.exports hasn't been set yet. I would rather do something like this:
web.js
var express = require("express");
var app = express();
app.get("/param1", function (req, res) {
// ...
});
require("./web2")(app);
app.listen(/* port number */);
web2.js
module.exports = function (app) {
    app.get("/param2", function (req, res) {
        // ...
    });
};
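For the updated scenario, note that require("./web2.js")(app, pgclient) runs once at startup, before any request has had a chance to set pgclient, so web2.js only ever sees the initial undefined value. Staying with the same pattern, one way around this is to pass something that is looked up at request time rather than the value itself, for example a getter function (a sketch, not tested against the original project):
// web.js - pass a getter instead of the (still-undefined) client value
require("./web2")(app, function getClient() { return pgclient; });

// web2.js
module.exports = function (app, getClient) {
    app.get("/param1", function (req, res) {
        var pgclient = getClient(); // read the current value at request time
        if (pgclient != null) {
            res.send("pgclient is ready");
        } else {
            res.status(500).send("pgclient is NULL");
        }
    });
};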

What is the correct way to stream an index page to a client?

In most examples you find on the web, an index.html file is served like the following:
function serveIndexPage(response) {
    fs.readFile(__dirname + '/public/index.html', function (err, data) {
        response.end(data);
    });
};
This seems like a bad idea, as the whole file is read into memory and then sent to the client. Is there some better way to do this? I know that libraries like Connect and Express provide such functionality, but for my project I'd like to use plain node.js.
EDIT
Also, you sometimes see readFileSync used, which is even worse IMHO.
Pipe your data through a stream; a simple static HTTP server looks like this:
var Http = require('http'),
    Url = require('url'),
    Path = require('path'),
    Fs = require('fs');

Http.createServer(function(req, res) {
    var pathname = Url.parse(req.url).pathname;
    var filename = Path.join(process.cwd(), pathname);
    Fs.access(filename, function(err) {
        if (err) {
            res.writeHead(404);
            res.end();
            return;
        }
        res.writeHead(200 /*, mime type */);
        var fileStream = Fs.createReadStream(filename);
        fileStream.pipe(res);
    });
}).listen(process.env.PORT || 1999);
The pipe() call is shorthand for something like:
var s = Fs.createReadStream(filename);

s.on('data', function (data) {
    res.write(data);
});
s.on('end', function() {
    res.end();
});
In theory you could also read the file line by line, calling response.write() for every line, as sketched below.
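A sketch of that line-by-line variant using the built-in readline module (not something you would normally do for a static file, but it illustrates the idea):
var Fs = require('fs'),
    Readline = require('readline');

function serveLineByLine(filename, res) {
    var rl = Readline.createInterface({ input: Fs.createReadStream(filename) });
    rl.on('line', function (line) {
        res.write(line + '\n'); // write each line to the client as it is read
    });
    rl.on('close', function () {
        res.end();
    });
}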
