node js localhost shal open a file - node.js

Tutorial: I want to open a file via localhost, but I don't know which path I have to type into the Browser. Is localhost, where my server.js is located? (sorry, I'm new to programming and node)
Tutorial-Code
var path = require('path'),
fs = require('fs');
require('http').createServer(function(req, res) {
var file = path.normalize(req.url);
console.log(file);
path.exists(file, function(exists) {
if (exists) {
fs.stat(file, function(err, stat) {
var rs;
if (err) { throw err; }
if (stat.isDirectory()) {
res.writeHead(403);
res.end('Forbidden');
} else {
rs = fs.createReadStream(file);
res.writeHead(200);
rs.pipe(res);
}
});
} else {
res.writeHead(404);
res.end('Not found');
}
})
}).listen(4000);

request.url is normally /something/like/an/absolute/path unless you get requests from a HTTP proxy client (which adds http://... prefix to request.url) or make some custom HTTP requests.
Anyways path.normalize only takes care of .. And .s.
Your code will let anybody access any file (accessible by the account in which node process is running) on your computer.
A better/safer practice is to join __dirname with decoded request.url and check if resolved path starts with the absolute path (with trailing path separator) of the directory you want to serve static content from:
var scriptDir = path.resolve(__dirname + path.sep + "static" + path.sep),
requestPath = decodeURIComponent(request.url);
requestPath = path.resolve(path.join(__dirname, "static", requestPath));
if (requestPath.indexOf(scriptDir) === 0) {
// serve the file
} else {
response.writeHead(403);
response.end(http.STATUS_CODES[403]);
}
Now if you request say, http://localhost:4000/index.html it should serve the file located in /path/to/your/node/app/dir/static/index.html

Related

browser-sync disable directory browsing for some sub-directories

I am new to use of browser-sync and gulp but according to usage i am trying to make my http-server accessible via webserver. Additionally i want to exclude some directories to be hidden or not to be browsed by using negation on files attribute but it does not work... My main goal is to define some directories to give 404 as always whatever is requested from them...
Can someone please check; if possible, here is my gulp implementation:
var gulp = require('gulp');
var browserSync = require('browser-sync').create();
var files = ['d2cvib/output/**/*.{xml}','!d2cvib/changed-list/**'];
// Static server
gulp.task('browser-sync', function() {
browserSync.init({files,
port: 8203,
server: {
baseDir: "/mule_local_exchange/d2c/",
middleware: [
function(req, res, next) {
const user = 'd2c';
const pass = 'd2cweb';
let authorized = false;
// See if authorization exist in the request and matches username/password
if (req.headers.authorization) {
const credentials = new Buffer(req.headers.authorization.replace('Basic ', ''), 'base64').toString().split(/:(.*)/)
if (credentials[0] === user && credentials[1] === pass) {
authorized = true;
}
}
if (authorized) {
// Proceed to fulfill the request
next();
} else {
// Authorization doesn't exist / doesn't match, send authorization request in the response header
res.writeHead(401, {'WWW-Authenticate': 'Basic realm="Authenticate"'})
res.end();
}
}
],
directory: true
}
});
});
I could not disable directory listing; but work-aroundly i've disabled response code if some of the directories have been asked by HTTP GET, not %100 clean solution but works for my case:
var gulp = require('gulp');
var browserSync = require('browser-sync').create();
var cache = require('gulp-cache');
//For conditions of rest-uri patterns
function buildSearch(substrings) {
return new RegExp(
substrings
.map(function (s) {return s.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');})
.join('{1,}|') + '{1,}'
);
}
gulp.task('clear-cache', function() {
// Or, just call this for everything
cache.clearAll();
});
// Static server
gulp.task('browser-sync', function () {
browserSync.init({
port: 8203,
server: {
baseDir: "/mule_local_exchange/d2c/",
middleware: [
function (req, res, next) {
const user = 'd2c';
const pass = 'd2cweb';
var pattern = buildSearch(['changed-list','current', 'changed_list']);
let authorized = false;
// See if authorization exist in the request and matches username/password
if (req.headers.authorization) {
const credentials = new Buffer(req.headers.authorization.replace('Basic ', ''), 'base64').toString().split(/:(.*)/)
if (credentials[0] === user && credentials[1] === pass) {
authorized = true;
}
}
if (authorized) {
if (pattern.test(req.url)) { //400 for not required directories
res.writeHead(400, {'Response':'Bad-request'})
res.end();
} else { // Proceed to fulfill the request
next();
}
} else {
// Authorization doesn't exist / doesn't match, send authorization request in the response header
res.writeHead(401, {
'WWW-Authenticate': 'Basic realm="Authenticate"'
})
//res.send(401,{ 'Authentication' : 'Failed' })
res.end();
}
}
],
directory: true
}
});
});
That part does the job:
if (pattern.test(req.url)) { //400 for not required directories
res.writeHead(400, {'Response':'Bad-request'})
res.end();
}
You can define multiple baseDirs for browsers-sync and the directory listing will only be available for the first one:
baseDir: ["/mule_local_exchange/d2c/PUBLIC", "/mule_local_exchange/d2c/", "/some/other/dir"],
directory: true
In this example, directory listing will only show contents of /mule_local_exchange/d2c/PUBLIC". All files will still be available from all the dirs.

Express request no longer available on fileStream.on('close')

I have an implementation of a POST router in node.js with express. The body of the post is a list of files, and the idea is that these files should be zipped and a link to the resulting zip file should be returned. I'm trying to use 'express-session' to store the progress in terms of number of files zipped as well as the file name of the zip upon completion. Updating the number of files zipped (req.session.current) works fine, but for some reason I can't set the file name (req.session.zipFile) in the session when the fileStream closes. I'm suspecting that the req object is no longer valid by the time fileStream.on('close') is reached, but I'm not sure how to handle that. I can't access the session data without a request.
This is the complete router implementation:
const express = require('express');
const router = express.Router();
const fs = require('fs');
const ZIP_FILES_PATH = require('../constants');
router.post('/', function(req, res, next){
const json = JSON.parse(req.body.data);
const path = json["base"];
if (!path || path.length === 0){
res.render("error", {msg: "JSON missing 'base'"})
return;
}
const files = json["files"];
if (!files || files.length === 0){
res.statusCode = 400;
res.send("JSON missing 'files'");
return;
}
if (!fs.existsSync(path)) {
res.statusCode = 400;
res.send("Directory '" + path + "' does not exist");
return;
}
try{
var sessData = req.session;
sessData.total = files.length ;
sessData.current = 0;
sessData.zipFile = '';
zipFiles(path, files, req, res);
}catch(error){
res.statusCode = 500;
res.send("The files could not be zipped");
return;
}
res.render("zipping")
});
module.exports = router;
const zipFiles = (path, files, req, res) => {
const zipFile = require('crypto').createHash('md5').update(path).digest("hex") + "_" + new Date().getTime() + ".zip";
const archiver = require('archiver');
const fileStream = fs.createWriteStream(ZIP_FILES_PATH + zipFile);
const archive = archiver('zip', {
gzip: true,
zlib: { level: 9 }
});
archive.on('error', function(err) {
throw err;
});
fileStream.on('close', function() {
req.session.zipFile = "/download?file=" + zipFile; //Doesn't stick!
});
archive.pipe(fileStream);
files.map(file => zipSingleFile(path, file, req, archive));
archive.finalize();
}
const zipSingleFile = (path, file, req, archive) => {
fs.existsSync(path + "/" + file) ? archive.file(path + "/" + file, { name: file }) : null;
req.session.current = req.session.current + 1;
}
zipFiles() is asynchronous. That means it doesn't block and finishes some time later. So, in the flow of your request handler, you call
res.render("zipping")
before zipFiles() finishes. And, res.render() will send the response and close the output stream.
If you want to wait to call res.render() until you're done with the zipFiles() call, then you need to either call it from within zipFiles() when it's all done or you need to add a callback to zipFiles() so it can communicate back when it's done.
Also, is there any reason you can't set req.session.zipFile = "/download?file=" + zipFile; before you call res.render()?
Also, depending upon how you have your session configured (which you don't show us any code for), just doing this:
req.session.zipFile = "/download?file=" + zipFile;
may not be enough to actually save that value into the session. You may also have to call .save() on the session.
And, you are setting:
req.session.zipFile
at the end of your zipping process so any requests that arrive from that session before your zipping finishes won't see that session variable set either (this seems like a concurrency problem to me).

Looking for a cleaner way to run NodeJS as a service with absolute paths

I have a NodeJS project that starts with server.js under /opt/battleship. If I cd to that dir and type node server, everything works correctly, but if I sit in /root and use an absolute path to start it up, such as node /opt/battleship/server, then everything I serve from its ./lib and ./public subdirs gets a 404 response. This is a problem in production because my /etc/init/battleship.conf script specifies starting the process on boot with
exec /usr/local/bin/node /opt/battleship/server.js >> /var/log/battleship.log 2>&1
It runs, but then I get the 404 errors. If I put the line
cd /opt/battleship
just above that in the battleship.conf file, I don't get 404 errors, I get all my files correctly, but it seems as if using cd in a .conf file is messy and prone to errors. Correct me if I'm wrong, this is my first handwritten .conf file. Is there a better way to have my server.js server up files correctly from its ./lib and ./public subdirectories? Below is my server.js file for reference:
var PORT = 3000;
var requirejs = require('requirejs');
var http = requirejs('http');
var fs = requirejs('fs');
var path = requirejs('path');
var mime = requirejs('mime');
var cache = {};
requirejs.config({
baseUrl: __dirname,
nodeRequire: require,
packages: [
{name: 'ship', location: 'public/js/lib/ship'},
{name: 'GameState', location: 'public/js/lib', main: 'GameState'}
]
});
requirejs(['./lib/battleship_server'], function(battleship_server) {
function send404(response) {
response.writeHead(404, {'Content-Type': 'text/plain'});
response.write('Error 404: response not found.');
response.end();
}
function sendFile(response, filePath, fileContents) {
response.writeHead(
200,
{'Content-Type': mime.lookup(path.basename(filePath))}
);
response.end(fileContents);
}
function serveStatic(response, cache, absPath) {
if (cache[absPath]) {
sendFile(response, absPath, cache[absPath]);
} else {
fs.exists(absPath, function(exists) {
if (exists) {
fs.readFile(absPath, function(err, data) {
if (err) {
send404(response);
} else {
cache[absPath] = data;
sendFile(response, absPath, data);
}
});
} else {
send404(response);
}
});
}
}
var server = http.createServer(function(request, response) {
var filePath = false;
if (request.url === '/') {
filePath = 'public/index.html';
} else {
filePath = 'public' + request.url;
}
var absPath = './' + filePath;
serveStatic(response, cache, absPath);
});
server.listen(PORT, function() {
console.log('Server listening on port ' + PORT + '.');
});
battleship_server(server);
});
Use __dirname to get the folder of the script.
For example, if you want to get index.html, and it's in the directory of the script, use this:
var path = require('path');
var pathOfIndex = path.join(__dirname, "index.html");

How to download a file from server to local sysytem desktop in node.js?

I have a file which I download to our desktop for all type of operating
system like windows, linux, mac etc. The below code I am using for download it from third server to my serve folder:
var fileticketID = req.params.fileticketID;
var download = function(uri, filename, callback){
BASE.request.head(uri, function(err, res, body){BASE.request(uri).pipe(BASE.FS.createWriteStream('public/images/'+filename)).on('close', function(err) {
var stats = BASE.FS.statSync('public/images/'+filename);
var fileSizeInBytes = stats["size"];
var fields = {filename : filename,size:fileSizeInBytes};
var fieldsData = new downloadFile(fields);
fieldsData.save(function(err, data){
if(!err)
{
if(fileSizeInBytes==0)
{
resMain.send('error');
}
else
{
resMain.send(filename);
}
//console.log('successfully saved -- ');
}
else
{
resMain.send('error');
//console.log('sorry error ', err);
}
});
});
Now I want to file is also downloaded in desktop

Express.js throws Error: ENOENT on linux server

So, I have been working on an upload script (code is below) that I have working on my windows machine, but on my Linux server it seems to fail with
Error: ENOENT, open 'uploads/da5ab67b48ea2deecd25127186017083'
I understand that the error is saying there is no path/file, but I do check to for file existence before I attempt the file rename.
exports.product_files_upload = function (req, res) {
if (req.files) {
var tmpArray = [];
for(var file in req.files){
console.log(file)
if (file){
var splitName = file.split('-');
if (splitName.length > 1) {
var username = splitName[0];
var productId = splitName[1];
var index = splitName[2];
} else {
return;
}
} else {
return;
}
//console.log(username)
//console.log(productId)
var tmp_path = req.files[file].path;
console.log(fs.existsSync(tmp_path))
createUserDir();
createProductDirs(username, productId);
//console.log(__dirname)
var target_path = './public/user_files/'+ username + '/products/'+productId+'/files/' + req.files[file].name,
save_path = 'user_files/'+ username + '/products/'+productId+'/files/' + req.files[file].name;
if (fs.existsSync(target_path)) {
tmpArray.push({exists:true, name:req.files[file].name});
} else {
tmpArray.push({
size: req.files[file].size,
type: req.files[file].type,
name: req.files[file].name,
path: save_path,
exists:false
});
if (fs.existsSync(tmp_path)) {
fs.rename(tmp_path, target_path, function(err) {
if (err) throw err;
// delete the temporary file, so that the explicitly set temporary upload dir does not get filled with unwanted files
fs.unlink(tmp_path, function() {
if (err) throw err;
// res.send(save_path);
});
});
} else {
tmpArray.push({
size: req.files[file].size,
type: req.files[file].type,
name: req.files[file].name,
path: save_path,
exists:false
});
}
}
}
res.json(tmpArray);
}
};
UPDATE: I am using forever for running my express app, and when I stopped my app forever process and switched to just "node app.js" the problem was fixed. This is not an acceptable fix. I am thinking there might be a problem with forever.
Okay, I figured it out. I was using forever with an absolute path to launch my app on my linux box like
forever start /path/to/app.js
But when I cd into the app directory and then launch the app it works.
forever start app.js

Resources