node.js wait for file operation to complete

I am brand new to Node.js and I need to write a web server that takes data and writes it to a file. That file is then used by an external program.
My problem is that Node.js executes the external program before the data is completely written to the file. How can I wait for the write to complete before executing the external program?
var http = require('http')
var map = require('through2-map')
var fs = require('fs')
var str = ""
var sh = require('execSync');
var tmp_filename = ""

function random (low, high) {
    return Math.random() * (high - low) + low;
}

function execute_program(){
    var command = 'ruby ' + __dirname + '/check_content.rb --file ' + tmp_filename
    var result = sh.exec(command);
    console.log('return code ' + result.code);
    console.log('stdout + stderr ' + result.stdout);
    return result.stdout
}

function write_to_file (chunk){
    str = chunk.toString();
    fs.appendFile(tmp_filename, str, function (err){
        if (err) throw err;
        console.log('The "data to append" was appended to file!');
    })
}

var server = http.createServer(function (req, res){
    tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
    res.writeHead(200, {'Content-Type': 'text/plain'});
    message = req.pipe(map(function (chunk, res) {
        write_to_file(chunk);
    }))
    var out = execute_program();
    res.write(out)
    message.pipe(res)
})

server.listen(8000)

Note: I'm not sure what you're attempting with res.write(out) and then message.pipe(res). That will write the output from your ruby script to the response and then it looks like you're piping the request body in after that. As it is, the message.pipe(res) line won't work because you'll be trying to pipe from a writable stream, which you can't do. For the purpose of this answer I'm going to assume you don't want the message.pipe(res) line; please correct me if I'm wrong.
Simple / Quick Answer
The simplest answer is to listen for the end event on req and then execute your program, etc. when that event fires:
var server = http.createServer(function (req, res){
    tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
    res.writeHead(200, {'Content-Type': 'text/plain'});

    req.on('end', function() {
        var out = execute_program();
        res.write(out);
        res.end();
    });

    req.pipe(map(function (chunk, res) {
        write_to_file(chunk);
    }));
})
Optimization: fs.createWriteStream()
You could also use fs.createWriteStream() instead of appending chunks with fs.appendFile(). That would look like:
var server = http.createServer(function (req, res){
    tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
    res.writeHead(200, {'Content-Type': 'text/plain'});

    req.on('end', function() {
        var out = execute_program();
        res.write(out);
        res.end();
    });

    req.pipe(fs.createWriteStream(tmp_filename));
})
With that in place, you can remove your entire write_to_file function.
That also prevents a problem you might hit if you happen to get the same random number for your temp file: with your current code, you would append to the existing file, which is probably not what you want. If for some reason you do want to append in that case, just pass the 'a' (append) flag to fs.createWriteStream: fs.createWriteStream(tmp_filename, {flags: 'a'});.
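If you'd rather avoid the collision entirely, you can derive the temp name from random bytes instead of Math.random(). A minimal sketch, assuming the built-in crypto module is acceptable here:

var crypto = require('crypto');

function make_tmp_filename() {
    // 6 random bytes -> 12 hex characters; far less likely to collide
    // than Math.random() over a four-to-five digit range
    return '/tmp/tmp_template_' + crypto.randomBytes(6).toString('hex') + '.template';
}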

Thanks Mike S. You're right, the message.pipe(res) and res.write(out) were redundant. I did some research and used the async.waterfall package, and the following worked for me, but I'll take your suggestion for the optimization.
var http = require('http');
var map = require('through2-map');
var fs = require('fs');
var str = "";
var sh = require('execSync');
var async = require('async');
var tmp_filename = "";

function random (low, high) {
    return Math.random() * (high - low) + low;
}

function write_to_file (chunk){
    str = chunk.toString();
    fs.appendFile(tmp_filename, str, function (err){
        if (err) throw err;
        console.log('The "data to append" was appended to file!');
    })
}

function execute_program(){
    var command = 'ruby ' + __dirname + '/check_content --file ' + tmp_filename
    var result = sh.exec(command);
    console.log('return code ' + result.code);
    console.log('stdout + stderr ' + result.stdout);
    return result.stdout
}

async.waterfall([
    function(callback){
        var server = http.createServer(function(req, res){
            callback(null, req, res)
        }).listen(8000);
    },
    function(req, res, callback){
        tmp_filename = '/tmp/tmp_template_' + random(1000, 99999) + '.template'
        var message = req.pipe(map(function (chunk) {
            write_to_file(chunk);
        }))
        setTimeout(function () {
            callback(null, req, res, message);
        }, 666);
    },
    function(req, res, message, callback){
        var out = execute_program()
        res.write(out)
        message.pipe(res)
        callback(null, 'done');
    }
])
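For what it's worth, the fixed 666 ms timeout is still a race: a slow client can take longer than that to finish uploading. Here is a sketch that combines Mike S.'s event-based approach with the built-in child_process.exec, so nothing blocks and nothing is timed; the check_content.rb invocation is copied from the question:

var http = require('http');
var fs = require('fs');
var crypto = require('crypto');
var exec = require('child_process').exec;

http.createServer(function (req, res) {
    var tmp_filename = '/tmp/tmp_template_' + crypto.randomBytes(6).toString('hex') + '.template';
    var file = fs.createWriteStream(tmp_filename);
    req.pipe(file);
    // 'finish' fires only after everything piped into the file has been flushed
    file.on('finish', function () {
        exec('ruby ' + __dirname + '/check_content.rb --file ' + tmp_filename,
            function (err, stdout, stderr) {
                res.writeHead(err ? 500 : 200, {'Content-Type': 'text/plain'});
                res.end(err ? String(stderr) : stdout);
            });
    });
}).listen(8000);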

Related

How to send multiple image objects to the client in Node Express?

I am new to Angular. I have a set of images and I want to display them in the client browser when the page loads.
Is this possible?
I am able to send a single file, but not multiple files, and now I am stuck. Can someone help me?
Thanks for the help.
router.get('/getList', function(req, res, next) {
    var fileNames = [];
    fileNames = readDir.readSync('/NodeWorkspace/uploads/output/', ['**.png']);
    var data = {};
    fileNames.forEach(function(filename) {
        filepath = path.join(__dirname, '../../uploads/output') + '/' + filename;
        fs.readFile(path.join(__dirname, '../../uploads/output') + '/' + filename, function(err, content) {
            if (!err) {
                console.log(content);
            }
        });
    });
    //res.sendFile(path.join(__dirname,'../../uploads/output/', fileNames[0]));
    response.data = fileNames;
    res.json(response);
});
Convert the fs callback to a promise, read all the files in parallel, and send the response once everything is done (util.promisify is built in from Node 8 onward):
const fs = require('fs');
const path = require('path');
const util = require('util');
const readDir = require('readdir'); // assuming the 'readdir' package used in the question
const readFile = util.promisify(fs.readFile);

router.get('/getList', function (req, res, next) {
    // prefer the async variant over readSync so the event loop isn't blocked
    const fileNames = readDir.readSync('/NodeWorkspace/uploads/output/', ['**.png']);
    const files = fileNames.map(function (filename) {
        const filepath = path.join(__dirname, '../../uploads/output', filename);
        return readFile(filepath); // each call returns a promise for that file's contents
    });
    Promise.all(files).then(contents => {
        // contents is an array of Buffers, in the same order as fileNames
        res.json({data: contents});
    }).catch(error => {
        res.status(400).json({error: error.message});
    });
});
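One caveat with the snippet above: readFile resolves with Buffers, and res.json serializes a Buffer as {type: 'Buffer', data: [...]}, which is awkward for the browser. A sketch (the field names are illustrative) that maps each buffer to a base64 data URI an img tag can consume directly:

Promise.all(files).then(contents => {
    const payload = fileNames.map((name, i) => ({
        name: name,
        // data URI the client can assign straight to an <img> src
        src: 'data:image/png;base64,' + contents[i].toString('base64')
    }));
    res.json({data: payload});
}).catch(error => {
    res.status(400).json({error: error.message});
});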

Need to send response after an action has completed

I am trying to make a web crawler which crawls IMDB and lists the movie name and rating. This is my index.js file.
Suppose I am crawling 10 movies. I save the crawled results in a separate file, say message.txt. Now I want to send this message.txt file as the response to any request. But whenever I make a request, it initially sends an empty file to my browser; then I noticed that it takes some time before the crawled results are saved in message.txt. I think this is because all actions in Node.js are asynchronous. So is there a way to send the message.txt file only after crawling is complete?
var express = require('express');
var app = express();
var cheerio = require('cheerio');
var request = require('request');
var fs = require('fs');

app.listen(8080);
console.log('Running');

app.get('/', function(req, res) {
    console.log('Received the get Request');
    var i = 1;
    var count = 0;
    while (count < 10) {
        var url = 'http://www.imdb.com/title/tt' + i + '/';
        console.log(url);
        count = count + 1;
        i = i + 1;
        request(url, function(error, response, html) {
            if (!error) {
                var $ = cheerio.load(html);
                var title, ratings, released;
                var json = {
                    title: '',
                    ratings: '',
                    released: ''
                };
                $('.title_wrapper').filter(function() {
                    var data = $(this);
                    json.title = data.children().first().text().trim();
                    json.released = data.children().last().children().last().text().trim();
                });
                $('.ratingValue').filter(function() {
                    var data = $(this);
                    json.ratings = parseFloat(data.text().trim());
                });
                console.log(json);
                fs.appendFile('message.txt', JSON.stringify(json, null, 4) + '\n', function(err) {});
            }
        });
    }
    res.sendFile(__dirname + '/index.js');
});
You can use the async package which is great for controlling flow, something like:
console.log('Received the get Request');
var i = 1;
var count = 0;
while (count < 10) {
    var url = 'http://www.imdb.com/title/tt' + i + '/';
    console.log(url);
    count = count + 1;
    i = i + 1;
    async.waterfall([
        function sendRequest (callback) {
            request(url, function (error, response, html) {
                if (error) { return callback(error); }
                var $ = cheerio.load(html);
                var json = {
                    title: '',
                    ratings: '',
                    released: ''
                };
                $('.title_wrapper').filter(function() {
                    var data = $(this);
                    json.title = data.children().first().text().trim();
                    json.released = data.children().last().children().last().text().trim();
                });
                $('.ratingValue').filter(function() {
                    var data = $(this);
                    json.ratings = parseFloat(data.text().trim());
                });
                callback(null, JSON.stringify(json, null, 4) + '\n');
            });
        },
        function appendFile (json, callback) {
            fs.appendFile('message.txt', json, function(err) {
                if (err) { return callback(err); }
                callback();
            });
        }
    ], function(err) {
        res.sendFile(__dirname + '/index.js');
    });
}
fs.appendFile('message.txt', JSON.stringify(json, null, 4) + '\n', function(err) {
    // This part is executed only after the append has completed
});
You have to use the callback there, as that part is only called once your operation has finished. We are relying on the callback feature here; although there isn't any concrete argument besides err in our case, we don't need any other. Please try it.
fs.appendFile() is asynchronous, so the stuff you append to the file won't be there right away when the function returns. So if you want to send that file to the user, you'll need to do it inside the callback you supply to fs.appendFile():
app.get('/', function(req, res) {
    ...
    fs.appendFile(
        'message.txt',
        JSON.stringify(json, null, 4) + '\n',
        function(err) {
            if (err) {
                // Log the error and send a message to the user here
                return;
            }
            res.sendFile(__dirname + '/index.js');
        }
    );
});
You may be tempted to use fs.appendFileSync() instead. That would be fine for a command line tool, but since this is a web server, do not do that. It will lock up the thread while the I/O happens.
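Tying it back to the original question: ten requests are in flight, so the response should wait for all of them, not just one append. Below is a sketch using the async package's each; request/cheerio/fs are the same modules as above, the tt URL scheme is copied from the question, and the scraping is trimmed to the title for brevity:

var express = require('express');
var request = require('request');
var cheerio = require('cheerio');
var async = require('async');
var fs = require('fs');

var app = express();
app.listen(8080);

app.get('/', function (req, res) {
    var ids = [];
    for (var i = 1; i <= 10; i++) ids.push(i);

    async.each(ids, function (id, done) {
        request('http://www.imdb.com/title/tt' + id + '/', function (error, response, html) {
            if (error) return done(error);
            var $ = cheerio.load(html);
            var json = {title: $('.title_wrapper').children().first().text().trim()};
            fs.appendFile('message.txt', JSON.stringify(json, null, 4) + '\n', done);
        });
    }, function (err) {
        // runs only after every request *and* every append has finished
        if (err) return res.status(500).send(err.message);
        res.sendFile(__dirname + '/message.txt');
    });
});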

Video from Mongo Grid fs is not playing on Safari browser (also on Cordova app)

I am using MongoDB to store video files in GridFS. It surprised me today to find that the video does not play in Safari, although video read from GridFS plays fine in Chrome and Firefox. Below are two approaches for reading video files back from GridFS; both have the same problem. I did verify that the correct MIME type is being set.
Approach 1:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.open(function (err, gridStore) {
                var stream = gridStore.stream(true);
                if (!Utilities.isEmptyObject(fileObj.metadata)) {
                    res.setHeader('Content-Type', fileObj.metadata.contentType);
                }
                stream.on("data", function (chunk) {
                    log.debug("Chunk of file data");
                    res.write(chunk);
                });
                stream.on("end", function () {
                    log.debug("EOF of file");
                    res.end();
                });
                stream.on("close", function () {
                    log.debug("Finished reading the file");
                });
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
Approach 2:
exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    log.debug('Calling previewFile inside FileUploadService for content id ' + contentId);
    //Read metadata details from fs.files
    var query = {_id: contentId};
    documentOperationModule.getDocumentByQuery(query, constants.FS_FILES_COLLECTION, function (err, files) {
        if (!Utilities.isEmptyList(files)) {
            var fileObj = files[0];
            var gridStore = DBModule.db.gridStore(contentId, 'r');
            gridStore.read(function (err, data) {
                if (!err) {
                    if (!Utilities.isEmptyObject(fileObj.metadata)) {
                        res.setHeader('Content-Type', fileObj.metadata.contentType);
                    }
                    res.end(data);
                } else {
                    log.error({err: err}, 'Failed to read the content for id ' + contentId);
                    res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
                    res.json({error: err});
                }
            });
        } else {
            log.error({err: err}, 'Failed to read the content for id ' + contentId);
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: contentId + " not found"});
        }
    });
};
A screenshot of the Safari error was attached for reference (not reproduced here).
Please help.
Try this GIST (by https://gist.github.com/psi-4ward), which makes use of the byte-range header:
https://gist.github.com/psi-4ward/7099001
Although it does not work for me with Safari, it makes sure that the correct headers are set and the correct content is delivered, so it could narrow your problem down. Safari requests media with a Range header and expects a 206 Partial Content response, so a server that ignores ranges will not play video there.
EDIT
I've updated the GIST; it now works fine with Safari for me:
https://gist.github.com/derMani/218bd18cc926d85a57a1
This should solve your problem:
function StreamGridFile(req, res, GridFile) {
    if (req.headers['range']) {
        // Range request, partially stream the file
        console.log('Range Request');
        var parts = req.headers['range'].replace(/bytes=/, "").split("-");
        var partialstart = parts[0];
        var partialend = parts[1];
        var start = parseInt(partialstart, 10);
        var end = partialend ? parseInt(partialend, 10) : GridFile.length - 1;
        var chunksize = (end - start) + 1;
        res.writeHead(206, {
            'Content-disposition': 'filename=xyz',
            'Accept-Ranges': 'bytes',
            'Content-Type': GridFile.contentType,
            'Content-Range': 'bytes ' + start + '-' + end + '/' + GridFile.length,
            'Content-Length': chunksize
        });
        // Set filepointer
        GridFile.seek(start, function() {
            // get GridFile stream
            var stream = GridFile.stream(true);
            // write to response
            stream.on('data', function(buff) {
                // count data to abort streaming if range-end is reached
                // perhaps there's a better way?
                if (start >= end) {
                    // enough data sent, abort
                    GridFile.close();
                    res.end();
                } else {
                    res.write(buff);
                    start += buff.length; // advance the counter, otherwise the check above never fires
                }
            });
        });
    } else {
        // stream back whole file
        console.log('No Range Request');
        res.header('Content-Type', GridFile.contentType);
        res.header('Content-Length', GridFile.length);
        var stream = GridFile.stream(true);
        stream.pipe(res);
    }
}
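Wired into the question's handler, that could look something like this (a sketch: DBModule and gridStore are the question's own objects, and StreamGridFile is handed the opened grid store):

exports.previewFile = function (req, res) {
    var contentId = new DBModule.BSON.ObjectID(req.params.fileid);
    var gridStore = DBModule.db.gridStore(contentId, 'r');
    gridStore.open(function (err, gridFile) {
        if (err) {
            res.status(constants.HTTP_CODE_INTERNAL_SERVER_ERROR);
            res.json({error: String(err)});
            return;
        }
        StreamGridFile(req, res, gridFile);
    });
};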
Regards
Rolf

Why is this not working, and how do you debug code in Node.js?

First, I'm trying to learn Node.js, and to that end I am writing something like a router, as you have in Express.
This is my code:
function sirus() {
    var util = utilFuncs(),
        paths = {};

    this.router = util.handleReq;

    this.con = {
        get: function (path, fn, options) {
            options = (options || {}).method = "GET";
            util.makeRequest(path, fn, options);
        },
        post: function (path, fn, options) {
            options = (options || {}).method = "POST";
            util.makeRequest(path, fn, options);
        }
    };

    this.req = {
        get: function (path, fn) {
            util.addPath("GET", path, fn);
        },
        post: function (path, fn) {
            util.addPath("POST", path, fn);
        }
    };

    function utilFuncs() {
        function handleReq(req, res) {
            var url = parsePath(req.url);
            var path = paths[req.method + url];
            // console.log(req.url, url + ' requested');
            if (typeof path != "function") {
                res.writeHead(404);
            } else {
                path(req, res);
            }
            res.end();
        }

        function addPath(type, path, callback) {
            // add path as a key of the object for easier look-up;
            // root=/, /a=/a, /a/=/a, /a?=/a, /a/?..=/a so there are no duplicates
            path = parsePath(path);
            paths[type + path] = callback;
        }

        function parsePath(path) {
            path = url.parse(path).pathname;
            if ((/[^\/]+\/(?=$)/igm).test(path)) path = path.substring(0, path.length - 1);
            return path;
        }

        function makeRequest(path, fn, options) {
            var urls = url.parse(path);
            var d = {
                host: null,
                hostname: null,
                method: "GET",
                path: '/',
                port: 80,
                headers: {},
                auth: null,
                agent: false,
                keepAlive: false
            };
            for (var k in options) d[k] = options[k];
            d.host = urls.host;
            d.hostname = urls.hostname;
            d.path = urls.pathname;
            d.headers['Content-Type'] = 'application/x-www-form-urlencoded';
            d.headers['Content-Length'] = ((d || {}).body || {}).length || '';

            var req = http.request(options, function (res) {
                // console.log('STATUS: ' + res.statusCode);
                // console.log('HEADERS: ' + JSON.stringify(res.headers));
                res.setEncoding('utf8');
                var data = '';
                res.on('data', function (chunk) {
                    data += chunk;
                });
                res.on('end', function (chunk) {
                    data += chunk;
                });
                res.on('response', function () {
                    fn(res);
                });
            });
            req.on('error', function (e) {
                console.log('problem with request: ' + e.message);
            });
            req.write(d.body);
            req.end();
        }

        return {
            makeRequest: makeRequest,
            handleReq: handleReq,
            addPath: addPath
        };
    }
}
and I use it like this:
var http = require('http'),
    url = require('url'),
    app = new sirus();

http.createServer(function (req, res) {
    app.router(req, res);
}).listen(80, '127.0.0.1');

app.req.get('/', function (req, res) {
    res.writeHead(200, {'Content-Type': 'text/html'});
    var link = "https://www.reddit.com/";
    res.write('<a href="' + link + '">Click here</a>');
});

app.con.get('http://www.google.com/', function (res) {
    console.log('from req: ' + res);
});
The error I get is "First argument must be a string or Buffer".
The receiving part works correctly, but when I started to add the request-making part, something isn't right.
Also, since I'm new to JS and Node.js, I'd like general advice on anything that catches your eye that could be improved about this code, since I'm not sure I am doing things the best way.
In the line
req.write(d.body);
there is no member named "body" in the "d" object. Note also that the merged "d" object is never actually used for the request: http.request(options, ...) should presumably be http.request(d, ...).
1 - Why is this not working: you get exactly the error that tells you why. If you check the error details, it shows the file and the line where the error happened. Perhaps the arguments you pass to the function call are not what it is expecting.
2 - How to debug: look at the Node.js package called nodev.
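Putting those observations together, a minimal patch to makeRequest might look like this (a sketch: it defaults body to an empty string, computes Content-Length from it, passes the merged d to http.request, and hands the accumulated response body to the callback on 'end'; it assumes the same http and url requires as the question):

function makeRequest(path, fn, options) {
    var urls = url.parse(path);
    var d = {method: "GET", port: 80, headers: {}, agent: false, body: ''};
    for (var k in options) d[k] = options[k];
    d.host = urls.host;
    d.hostname = urls.hostname;
    d.path = urls.pathname;
    d.headers['Content-Type'] = 'application/x-www-form-urlencoded';
    d.headers['Content-Length'] = Buffer.byteLength(d.body);

    var req = http.request(d, function (res) { // d, not the raw options
        res.setEncoding('utf8');
        var data = '';
        res.on('data', function (chunk) { data += chunk; });
        // 'end' receives no chunk; the accumulated body goes to the callback here
        res.on('end', function () { fn(data); });
    });
    req.on('error', function (e) {
        console.log('problem with request: ' + e.message);
    });
    if (d.body) req.write(d.body); // only write when there is a body
    req.end();
}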

How do I download a bunch of files from a remote server, and concatenate them in a certain order using Node.js?

I'm trying to read the contents of a bunch of JavaScript files on a server, and then concatenate them into a new local file. The files have to be concatenated in a specific order (specified in an array). Here's what I have so far:
var http = require('http');
var fs = require('fs');

var commonWebFiles = getCommonWebDependenciesInOrder();
var fileContents = [];
var path = '/folder/';

fs.mkdir("target");

for (var i = 0, l = commonWebFiles.length; i < l; ++i) {
    getFileContents(path, commonWebFiles[i]);
}

function getCommonWebDependenciesInOrder() {
    //Hit manager to get a correctly ordered array of common web dependencies
    //Stub
    return [
        'file1.js',
        'file2.js',
        'file3.js'
    ];
};

function getFileContents(path, filename) {
    var contents = "";
    var writeStream = fs.createWriteStream("target/" + filename, {'flags': 'a'});
    var options = {
        host: 'ahost.net',
        port: 80,
        path: path + filename
    };
    var req = http.get(options, function(res) {
        res.on('data', function(chunk) {
            contents += chunk;
        });
        res.on('end', function() {
            writeStream.write(contents, encoding='binary');
            writeStream.end();
            fileContents[filename] = contents;
        });
    }).on('error', function(e) {
        console.log("Got error: " + e.message);
    });
};
This downloads the files and recreates them locally, but it seems a little clunky. When I tried to write a single file directly from a looped set of requests, I got the chunks out of order... I feel like there must be an easier way.
Thanks in advance.
Use async and request. async.map runs the requests in parallel but returns the results in the same order as the input array, which is exactly what ordered concatenation needs:
var fs = require('fs'),
    async = require('async'),
    request = require('request');

// Utility function to overcome request's (err, response, body) callback
// when we're only interested in err and body
function simpleRequest(url, callback) {
    request(url, function(err, response, body) {
        callback(err, body);
    });
}

async.map(urls, simpleRequest, function(err, results) {
    if (err)
        return console.error(err);
    fs.writeFile(outfile, results.join(''));
});
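With the values from the question filled in (host, path, and file list as posted; outfile is a hypothetical name for the concatenated result), the missing pieces would be:

var commonWebFiles = getCommonWebDependenciesInOrder();

var urls = commonWebFiles.map(function(filename) {
    return 'http://ahost.net/folder/' + filename;
});
var outfile = 'target/combined.js'; // hypothetical output filename

Because async.map preserves input order, results.join('') concatenates the files in exactly the order returned by getCommonWebDependenciesInOrder().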
