I have added the following
// Module requires for the Express app (app.js excerpt).
var express = require('express');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var fs = require("fs");
var index = require('./routes/index');
var users = require('./routes/users');
var app = express();
var basicAuth = require('express-basic-auth');
// Protect every route with HTTP Basic Auth.
// NOTE(review): no `challenge: true` option is given, so 401 responses
// carry no WWW-Authenticate header and the browser never shows the
// login popup -- this is the defect explained in the answer below.
app.use(basicAuth({
users: { 'worker': '????????' }
}));
to my app.js. Depending on where I put this code, the site either stops working entirely (displaying a blank white page) or works without any auth.
Which is the correct place to put this code?
When white space, in Chrome Developer tools can be seen
Failed to load resource: the server responded with a status of 401 (Unauthorized)
but no login popup appears.
I am running my site on localhost under WebStorm. Maybe the absence of a login popup is default browser behavior on localhost? Firefox behaves the same way.
With basic-auth, I use the following code (in app.js — or, if you want this only on certain routes, then place it before those routes only):
var auth = require('basic-auth');
// Reject every request whose Basic Auth credentials do not match the
// single hard-coded user; otherwise hand off to the next middleware.
app.use(function (req, res, next) {
  var credentials = auth(req);
  var isAuthorized = credentials !== undefined &&
      credentials['name'] === 'mike' &&
      credentials['pass'] === 'mike123';
  if (isAuthorized) {
    next();
  } else {
    // Send the challenge header so browsers know to prompt for a login.
    res.statusCode = 401;
    res.setHeader('WWW-Authenticate', 'Basic realm="NodeJS"');
    res.end('Unauthorized');
  }
});
When using the express-basic-auth module, the default configuration of the middleware does not add a WWW-Authenticate challenge header to responses to unauthorized requests. You must enable it by adding challenge: true to the options object, as follows:
// Same basicAuth setup, now with `challenge: true` so unauthorized
// responses include a WWW-Authenticate header and the browser shows
// its Basic Auth login popup.
app.use(basicAuth({
users: { 'worker': '????????' },
challenge: true
}));
This piece of code will allow most browsers to show a popup to enter username/password on unauthorized responses.
Related
We first developed REST api's using node.js and have that running on a VPS. We now have developed an Angular web app to display data that comes in via a mobile app that calls some of the REST api's and then updates the data by calls back to other REST API's. Running the Angular app on the localhost was able to successfully call the REST api's.
We want to combine both applications on the same server. After searching around it seemed that we could add commands to the REST api server.js to pass urls that didn't meet the REST api path to Angular. Code snippet is below:
// API verison
// API version prefix for all REST routes.
var apiVersion = '/v1'
var fs ;
var https ;
// Dependencies
var express = require('express');
var mongoose = require('mongoose');
var bodyParser = require('body-parser');
// MongoDB
...
// Express
var app = express();
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
// Routes
app.use(apiVersion, require('./routes/api'));
// Start server
fs = require('fs')
https = require('https')
https.createServer({
key: fs.readFileSync('...'),
cert: fs.readFileSync('...')
}, app)
.listen(443, function () {
console.log('HTTPS Server running on default port 443')
});
// Pass request to angular?
// NOTE(review): this catch-all returns index.html for every unmatched
// URL but never serves the Angular bundle's JS/CSS assets -- which is
// why the browser shows a blank page (see the answer below: add
// express.static for the dist folder). Also, `res.sendfile` is the
// deprecated lower-case spelling; Express 4 uses `res.sendFile`.
app.use(function(req, res) {
var path = require('path');
res.sendfile(path.resolve('/home/.../index.html')); // but not getting anything
});
The REST api's still work but when directing a browser to 'mydomain.net' I just get a blank page. I don't see any errors on the node/error logs.
Any ideas?
You can do something like this. Serve the static content from the dist folder and the rest will work fine. I have added two references below in case you need them.
// API version prefix for all REST routes.
var apiVersion = '/v1'
const fs = require('fs')
const https = require('https');
const path = require('path')
// Dependencies
const express = require('express');
const mongoose = require('mongoose');
const bodyParser = require('body-parser');
// MongoDB
...
// Express
const app = express();
app.use(bodyParser.urlencoded({ extended: true }));
app.use(bodyParser.json());
// Pass request to angular?
// Serve the built Angular assets. Replace 'path' with the real mount
// point (or drop it to serve from the root) and point path.resolve at
// the Angular dist folder -- both values here are placeholders.
app.use('path', express.static(path.resolve('your path should be correct')));
// Routes
app.use(apiVersion, require('./routes/api'));
// Start server
https.createServer({
key: fs.readFileSync('...'),
cert: fs.readFileSync('...')
}, app)
.listen(443, function () {
console.log('HTTPS Server running on default port 443')
});
Check here1 or here2 for more details, there are simple steps to follow.
I am trying to start my project via launching server.js but I am getting error:"cannot GET /"
even after I made an app.get() route in my server.js
I am using also "body-parser" as a middleware, and "cors"
server.js:
// Setup an empty JS object to act as the in-memory data store for all routes
const projectData = {};
// Require Express to run server and routes
const express = require('express');
// Start up an instance of app
const app = express();
/* Middleware*/
//Here we are configuring express to use body-parser as middle-ware.
const bodyParser = require('body-parser')
app.use(bodyParser.urlencoded({ extended: false }));
app.use(bodyParser.json());
// Cors for cross origin allowance
const cors = require('cors');
app.use(cors());
// Initialize the main project folder
app.use(express.static('views'));
const port = 8080;
app.use(express.static('dist'));
// Setup Server
const server=app.listen(port, ()=>{console.log(`running on localhost: ${port}`)});
// NOTE(review): only /all and /add are routed; there is no handler for
// '/', which is why the browser reports "Cannot GET /" (see the
// redirect-based fix in the answer below).
app.get('/all', sendData);
// GET /all: return whatever has been stored so far (an empty object
// until a POST /add has been made).
function sendData (request, response) {
response.send(projectData);
};
// TODO-ROUTES!
// POST /add: copy date/temp/content from the request body into the
// in-memory store and echo the updated store back.
app.post('/add', Info);
function Info(req, res) {
projectData['date'] = req.body.date;
projectData['temp'] = req.body.temp;
projectData['content'] = req.body.content;
res.send(projectData);
}
module.exports = server;
I made a POST request to /add and it works. Then I call GET /all and it also works. The error "Cannot GET /" occurs because you are requesting an endpoint that does not exist.
Just add dummy data to projectData if you want to see some output on a GET request without making any POST request first.
Here is my solution
// Redirect the bare root URL to the /all endpoint so GET / no longer 404s.
app.get('/', function (req, res) {
  res.redirect('/all');
});
we need this because you do not have any root URL set, so we set the root URL to redirect to /all
2nd step then we add a dummy data in the projectData
// Seed projectData with dummy values so GET /all returns something
// before any POST /add has been made.
// (Fixed: the original used `=` and stray `;` inside the object
// literal, which is a syntax error; object literals take `key: value`
// pairs separated by commas.)
var projectData = {
date: new Date().getDate(),
temp: 10,
content: "This is Test Content"
};
This way when you call 'http://localhost:8080/' you shall get redirected to http://localhost:8080/all and instead of empty {} you shall see the dummy data.
I am working on fixing my Cloudfront (CF) Cookie Setup within my ExpressJS application and have had on and off success with the cookies successfully being set and recognized by my Cloudfront distribution, but I noticed that when I am unsuccessful (Bulk of the time) with accessing content from the distribution, it seems to potentially be due to the CF Request Cookies having an N/A value for Domain, Path and Expires. How do Request Cookies work? Within ExpressJS, there is the ability to Set-Cookie, which is what I do and is correctly displayed in the Response Cookies section, but I'm not sure what is causing the Request Cookies to appear in the start they do. I also noticed the CloudFront-Signature cookie is different between the request and response cookie. Could this cause this problem?
Screenshot of my HTTP request on page load
Code setting cookie:
// Route module: signs CloudFront cookies at startup and guards /app
// routes behind Passport authentication.
var express = require('express');
var router = express.Router();
var session = require('express-session');
var passport = require('passport');
var crypto = require('crypto');
var moment = require('moment');
var path = require('path');
var fs = require('fs');
var cf = require('aws-cloudfront-sign');
var metaTags = require('./meta-routes');
// isLoggedIn is defined further down; function declarations are
// hoisted, so the forward reference is fine.
router.use('/app', isLoggedIn, require('./app-routes'));
// NOTE(review): path.join is given a single pre-concatenated string;
// path.join(__dirname, process.env.CF_PRIVATE_KEY) is likely what was
// intended -- confirm CF_PRIVATE_KEY begins with a path separator.
var cfPK = path.join(__dirname + process.env.CF_PRIVATE_KEY);
var cfOptions = {
keypairId: process.env.CF_KEY_ID,
// NOTE(review): expireTime null presumably falls back to the library's
// default expiry -- verify against the aws-cloudfront-sign docs.
expireTime: null,
privateKeyPath: cfPK
}
// Signed cookies are computed once at process startup and reused for
// every request thereafter.
var signedCookies = cf.getSignedCookies(process.env.CF_CNAME, cfOptions);
// Express middleware: gate requests behind Passport authentication.
// On success, attach every CloudFront signed cookie to the response
// and continue down the chain; otherwise send the visitor to /login.
// (Fixed: the original `if` had no braces, so it governed only the
// `for` loop -- `return next()` ran unconditionally, bypassing auth,
// and `res.redirect('/login')` was unreachable.)
function isLoggedIn(req, res, next) {
if (req.isAuthenticated()) {
for(var cookieId in signedCookies) {
res.cookie(cookieId, signedCookies[cookieId], { httpOnly: true, domain: process.env.DOMAIN_NAME || 'localhost', secure: true });
}
return next();
}
res.redirect('/login');
}
I am trying to run a simple node application using node modules and testing it using the Advanced REST Client.
The console is not showing any error.
But I am not getting anything in the output.
While running this on ARC, I am getting : Cannot /GET data
Text version of the code:
MainFile:
// Main file: sets up an Express app with a router for the dishes API.
var express = require('express');
//var morgan = require('morgan');
var bodyparser = require('body-parser');
var hostname = 'localhost';
var port = '3000';
var app = express();
//app.use(morgan('dev'));
var dishRouter = express.Router();
dishRouter.use(bodyparser.json());
var allDishes = require('./dishRouter');
//For all dishes
// NOTE(review): dishRouter is configured here but never mounted on the
// app (no `app.use(dishRouter)`), so every request 404s with
// "Cannot GET /..." -- see the answer's UPDATE below.
dishRouter.route('/dishes')
.get(allDishes.dishesGet)
.delete(allDishes.dishesDelete)
.post(allDishes.dishesPost)
;
//For a specific dish, addressed by :dishid
dishRouter.route('/dishes/:dishid')
.get(allDishes.dishSpecificGet)
.delete(allDishes.dishSpecificDelete)
.put(allDishes.dishSpecificPut)
;
app.listen(port,hostname,function(){
console.log('server runing properly');
});
dishRouter file:
console.log('in dishrouter file');
module.exports.dishesGet = function(req,res,next){
console.log('inside GET');
res.end('Will be displaying all the dishes');
};
module.exports.dishesDelete = function(req,res,next){
res.end('Will delete all the dishes');
};
module.exports.dishesPost = function(req,res,next){
res.end('will add the new dishes');
};
module.exports.dishSpecificGet = function(req,res,next){
res.end('displaying the specific dish :'+req.params.dishid);
};
module.exports.dishSpecificDelete = function(req,res,next){
res.end('Will delete the specific dish with id : '+req.params.dishid);
};
module.exports.dishSpecificPut = function(req,res,next){
res.write('will update the specific dish :'+req.params.dishid);
res.end('Updating the dish with values as name : '+req.body.name);
};
According to body-parser docs
Looks like your router is a bit broken here:
dishRouter.use(bodyParser.json())
Try switching this to:
app.use(bodyParser.json())
And I can recommend creating router in the file, where you write handlers and just export router.
UPDATE:
Here is what you forgot:
app.use(dishRouter)
When calling express.Router() you're just creating an instance of the router, but you have to connect it to the express application instance.
I'm pulling up a project for which I swear this was not a problem before, but apparently is not right now -- I'm probably doing something stupid. I'm seeing express & socket.io create two different "sid" cookies, one with a path of "/" and the other with a path of "/socket.io". The behavior I'm expecting is to share the same cookie/session between my express app & socket.io.
"sid" cookie for "/":
"sid" cookie for "/socket.io":
I'm setting up express via:
// Initializer: attaches cookie parsing and session handling to the app.
var config = require('config');
var express = require('express');
var cookieParser = require('cookie-parser');
var session = require('express-session');
var sessionStore = require('./session-store');
// Session options: sessions live in the shared store, `key` names the
// session cookie, and the cookie is only marked Secure when the server
// itself runs over HTTPS.
var sessionConfig = {
store : sessionStore,
secret : config.server.sessionSecret,
key : config.server.sessionKey,
saveUninitialized : true,
resave : true,
cookie : { secure: config.server.useHTTPS }
};
// Mount cookie parsing and session handling on the supplied app.
module.exports = function (app) {
app.use(cookieParser(config.server.sessionSecret));
app.use(session(sessionConfig));
};
I'm setting up socket.io via:
var config = require('config');
var redis = require('socket.io-redis')(config.redis.socket);
var cookieParser = require('socket.io-cookie-parser');
var sessionStore = require('./session-store');
module.exports = function (io) {
io.adapter(redis);
io.use(cookieParser(config.server.sessionSecret));
io.use(authorization);
};
function authorization (socket, next) {
var unauthorized = new Error('Unauthorized');
if (!socket.request.headers.cookie) {
return next(unauthorized);
}
var sessionKey = socket.server.engine.cookie;
var sessionId = socket.request.signedCookies[sessionKey] || socket.request.cookies[sessionKey];
if (!sessionId) {
return next(unauthorized);
}
sessionStore.get(sessionId, function (err, session) {
// use session's userId to fetch user & attach to socket
});
}
These two files are tied together from my main server file:
// Main server file: builds the HTTP server and ties express and
// socket.io together.
var http = require('http');
var express = require('express');
var socketio = require('socket.io');
var config = require('config');
var app = express();
var server = http.Server(app);
// NOTE(review): passing `cookie` here makes engine.io set its own
// cookie with the same name as the express session cookie (see the
// engine.io excerpt below) -- this is the source of the duplicate
// "sid" cookie scoped to /socket.io.
var io = socketio(server, {
cookie: config.server.sessionKey
});
// initialize aspects of the app
require('./config/initializers/io')(io);
require('./config/initializers/express')(app);
module.exports = server;
Okay, I think I solved it. It looks like supplying the cookie option when mounting socket.io atop my server ends up causing engine.io to set a cookie with the same name based upon these lines of code:
if (false !== this.cookie) {
transport.on('headers', function(headers){
headers['Set-Cookie'] = self.cookie + '=' + id;
});
}
According to RFC-2109 HTTP State Management Mechanism, the default path is the current URL path:
Path Defaults to the path of the request URL that generated the
Set-Cookie response, up to, but not including, the
right-most /.
That would explain the new cookie being created since socket.io's endpoint is /socket.io by default. Since I'm using custom authorization that reads a cookie anyway, I figure it's safe to disable cookies in engine.io by changing my socket instantiation to the following:
// Disable engine.io's own sid cookie entirely; the authorization
// middleware reads the express session cookie instead, so the
// duplicate cookie scoped to /socket.io is no longer created.
var io = socketio(server, {
cookie: false
});
This now breaks my authorization function included in the original question, specifically this line:
var sessionKey = socket.server.engine.cookie;
Since I'm no longer passing the cookie key through to socket.io/engine.io, I instead need to read straight from my config:
var sessionKey = config.server.sessionKey;