I am using EJS's compile to build notification templates, and I would like to know how to write the files to the file system in parallel and send the notification once all the files are saved.
Please see the code snippet below, which is what I am using:
var fs = require('fs');
var ejs = require('ejs');
var arrayOfData = [someData]; //Prepare data from database
// Iterate through the data
for (var i = 0; i < arrayOfData.length; i++) {
  generateFileFromTemplate(arrayOfData[i], function () {});
}
function generateFileFromTemplate(templateData, callback) {
  var outputFile = templateData.Id + ".html";
  var compiled = ejs.compile(fs.readFileSync('email-template.ejs', 'utf8'));
  var html = compiled(templateData);
  fs.writeFile(outputFile, html, callback);
}
Please help.
Use async.each for your use case
async.each(arrayOfData,
  function (ele, next) {
    generateFileFromTemplate(ele, next);   // pass next as the callback so async.each knows when each write finishes
  },
  function (err) {
    if (err) console.log('err', err);
    sendNotification();
  }
);
You can use a great utility library called Async, particularly its parallel method: https://github.com/caolan/async#parallel.
Here's an example:
var async = require('async');
/*-------------*/
var tasks = arrayOfData.map(function (data) {
  return function (cb) {
    generateFileFromTemplate(data, cb);   // cb is only called once the file has been written
  };
});

async.parallel(tasks, function (err) {
  if (err) console.log('err', err);
  console.log('My job is done');
});
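If you would rather avoid a dependency, a plain counter achieves the same result. A minimal sketch, assuming the same generateFileFromTemplate and sendNotification as above:
var pending = arrayOfData.length;

arrayOfData.forEach(function (data) {
  generateFileFromTemplate(data, function (err) {
    if (err) console.log('err', err);
    if (--pending === 0) sendNotification();   // all files written, send the notification
  });
});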
How do I handle concurrent file write requests on a Node server with socket.io? I am using this to write:
fs.writeFile('abc.txt', 'datatobewritten', 'utf8', function (err) {});
I have a file abc.txt, and if two users try to write to it at the same time I get an error, so how do I queue multiple requests?
You have to synchronize the writes.
For a single instance of Node.js you can use a simple queue, like this:
var fs = require('fs');

var queues = {};

class Queue {
  constructor () {
    this.queue = [];
  }

  // Write the job at the head of the queue; when it finishes, drop it
  // and report back so the caller can trigger the next one.
  next () {
    if (this.queue.length === 0)
      return;
    var [path, content, cb] = this.queue[0];
    fs.writeFile(path, content, 'utf8', (err) => {
      this.queue.shift();
      cb(err);
    });
  }

  // Queue a job; if it is the only one, start it immediately.
  add (...args) {
    this.queue.push(args);
    if (this.queue.length === 1) {
      this.next();
    }
  }
}

// One queue per file path: writes to the same path run one after another.
module.exports = function (path, content, cb) {
  var queue = queues[path];
  if (queue == null)
    queue = queues[path] = new Queue;
  queue.add(path, content, (err) => {
    cb(err);
    queue.next();   // kick off the next queued write for this path
  });
};
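For example, a caller would require this module instead of calling fs.writeFile directly (the file name write-queue.js is just an example):
var writeQueue = require('./write-queue');

writeQueue('abc.txt', 'datatobewritten', function (err) {
  if (err) return console.error(err);
  console.log('write finished');
});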
In a multi-process setup you have to use some locking, for example with the lockfile module.
var lockFile = require('lockfile');
var fs = require('fs');

module.exports = function (path, content, cb) {
  lockFile.lock('foo.lock', function (err) {
    if (err) return cb(err);
    fs.writeFile(path, content, function (writeErr) {
      // release the lock only after the write has completed
      lockFile.unlock('foo.lock', function (unlockErr) {
        cb(writeErr || unlockErr);
      });
    });
  });
};
For better performance you can even combine the two approaches: queue the writes inside each process, and take the lock only while a write is actually running.
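A minimal sketch of that combination, reusing the Queue class above with its next() method replaced so every queued write also goes through the lockfile (assuming lockfile is required in the same file; the .lock suffix and wait value are example choices):
// Inside the Queue class above: serialize per-path writes in-process,
// and hold a lockfile while the write runs so other processes stay out.
next () {
  if (this.queue.length === 0)
    return;
  var [path, content, cb] = this.queue[0];
  lockFile.lock(path + '.lock', { wait: 5000 }, (err) => {
    if (err) {
      this.queue.shift();
      return cb(err);
    }
    fs.writeFile(path, content, 'utf8', (writeErr) => {
      lockFile.unlock(path + '.lock', (unlockErr) => {
        this.queue.shift();
        cb(writeErr || unlockErr);
      });
    });
  });
}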
You should write a module named logs or whatever you like.
logs.js
var fs = require('fs');
var writeStream = fs.createWriteStream('./abc.txt');

module.exports = {
  /* PUSH */
  write: function (message, cb) {
    writeStream.write(message, cb);
  }
};
Then in your socket.io related module just require the module at the top like
var logs = require('./logs');
and write messages like this in the socket.io callbacks! :)
logs.write('datatobewritten');
Bottom line:
"use fs.createWriteStream instead of fs.writeFile"
Hope this makes sense :)
I need to download ~26k images. The image list and URLs are stored in a CSV file. I'm reading the CSV file and trying to download the images while looping through the list.
If I use a small set (~1-2k) it works fine, but when I switch to the full set I get an EMFILE error.
Error: EMFILE, open 'S:\images_download\Images\189900008.jpg'
I've noticed that Node tries to create all the files at once, which might be the issue, but I'm unable to force it to create them one by one. My understanding is that the code below should work like that, but obviously it does not.
(Just to mention that this code is executed on Windows.)
Code:
var csv = require("fast-csv");
var fs = require('fs');
var request = require('request');
var async = require('async');

fs.writeFile('errors.txt', '', function () {});

var downloaded = 0;
var totalImages = 0;
var files = [];

csv
  .fromPath("Device_Images_List.csv")
  .on("data", function (data) {
    files.push({ device: data[0], url: data[1] });
  })
  .on("end", function () {
    totalImages = files.length;
    async.each(files, function (file, callback) {
      var deviceId = file.device;
      var deviceUrl = file.url;
      if (deviceId != 'DEVICE_TYPE_KEY') {
        try {
          var writeStream = fs.createWriteStream('./Images/' + deviceId + '.jpg');
          var proxiedRequest = request.defaults({ proxy: "http://proxy:8080" });
          writeStream.on('open', function (fd) {
            var rem = proxiedRequest.get(deviceUrl);
            rem.on('data', function (chunk) {
              writeStream.write(chunk);
            });
            rem.on('end', function () {
              downloaded++;
              console.log('Downloaded: ' + deviceId + '; ' + downloaded + ' of ' + totalImages);
              writeStream.end();
            });
          });
          writeStream.on('close', function () {
            callback();
          });
        } catch (ex) {
          fs.appendFile('errors.txt', deviceId + ' failed to download', function (err) {
            callback();
          });
        }
      } else {
        callback();   // skip the header row but still tell async.each we are done
      }
    }, function (err) {
      if (err) {
        console.log(err);
      }
    });
  });
As @slebetman commented, the issue can be solved by using async.eachSeries to process the files one by one, or async.eachLimit to limit the number of parallel downloads:
async.eachLimit(files, 5, function (file, callback) {
  // ... process 5 files at a time
}, function (err) {
});
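For completeness, a sketch of what the per-file body could look like once it is trimmed down; proxiedRequest, downloaded, totalImages and files are the variables from the question's code, and the limit of 5 is just an example:
function downloadImage(file, callback) {
  if (file.device === 'DEVICE_TYPE_KEY') return callback();   // skip the CSV header row

  var out = fs.createWriteStream('./Images/' + file.device + '.jpg');

  proxiedRequest(file.url)
    .on('error', function () {
      fs.appendFile('errors.txt', file.device + ' failed to download\n', function () {
        callback();   // log the failure but keep the other downloads going
      });
    })
    .pipe(out);

  // 'close' fires once the file has been fully written to disk
  out.on('close', function () {
    downloaded++;
    console.log('Downloaded: ' + file.device + '; ' + downloaded + ' of ' + totalImages);
    callback();
  });
}

async.eachLimit(files, 5, downloadImage, function (err) {
  if (err) console.log(err);
});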
So I'm trying to fetch a bunch of files from a server. The current code is basically as follows.
var http = require('http');
var fs = require('fs');
var arr = [{ id: 'fileOne' }, { id: 'fileTwo' }, { id: 'fileThree' }, /* ... */];
function fetchData() {
  for (var i = 0; i < arr.length; i++) {
    var file = fs.createWriteStream("../path/file.zip");
    var request = http.get("url/AFG_adm.zip", function (response) {
      response.pipe(file);
    });
  }
}
I don't think this is the best approach; I'm trying to figure out how to handle errors and how to make sure a file is fully downloaded before the next iteration starts. Any help is much appreciated.
You should use the async module for handling the async part; the request module will also save you a lot of effort.
You can handle this in many ways using either async.cargo or async.map.
The idea is to group a series of tasks and then act on them asynchronously, getting notified once they are all done.
So a basic async.map over an array of files to download would look like this:
// required modules
var async = require('async');
var request = require('request');
var fs = require('fs');

// array of urls
var URLs = ['hxxp://...ZipFile1.zip', 'hxxp://...ZipFile2.zip'];

// destination directory
var destinationDirectory = 'downloads';

// asyncDownload function
function asyncDownload(url, callback) {
  // get the filename from the last path segment of the url
  var filename = url.substring(url.lastIndexOf("/") + 1);

  // create write stream
  var stream = fs.createWriteStream(destinationDirectory + "/" + filename);

  // listen for the open event to start the request and pipe
  stream.on('open', function () {
    request(url).pipe(stream);
  });

  // when the file has been fully written, call the callback with its path
  stream.on('finish', function () {
    callback(null, destinationDirectory + "/" + filename);
  });
}
async.map(URLs, asyncDownload, function (err, results) {
  console.log(results);   // the saved file paths, in the same order as URLs
});
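The question also asks about error handling, which the helper above leaves out. Here is a sketch of the same helper with request and stream errors routed to the callback, plus a guard so the callback fires only once (names follow the example above):
function asyncDownload(url, callback) {
  var filename = url.substring(url.lastIndexOf("/") + 1);
  var destination = destinationDirectory + "/" + filename;
  var stream = fs.createWriteStream(destination);
  var finished = false;

  // report back exactly once, whichever event fires first
  function done(err) {
    if (finished) return;
    finished = true;
    callback(err, err ? null : destination);
  }

  request(url)
    .on('error', done)        // network / DNS errors
    .pipe(stream);

  stream.on('error', done);   // file system errors
  stream.on('finish', function () {
    done(null);
  });
}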
I'm currently working on a small API with Node.js and restify that needs to handle a file upload by receiving a binary string.
What I don't know how to do is test it with Mocha, so I've been searching and found this on Stack Overflow: Unit test file upload with mocha. It's a fine start, but it won't work because it sends a multipart form, and what I require the client to send to the API is the file as a stream.
Here's my controller:
exports.uploadVideo = function (req, res, next) {
  var video = "public/video/" + req.params.videoId + ".mp4",
      util = require('util'),
      exec = require('child_process').exec;

  var newFile = fs.createWriteStream("./uploads/" + video);

  req.pipe(newFile);

  req.on('end', function () {
    var cmd = 'qtfaststart ' + './uploads/' + video;
    var qtfaststart = exec(cmd, function (error, stdout, stderr) {
      if (error === "atom not found, is this a valid MOV/MP4 file?\n" || error !== null) {
        return next(new restify.ConflictError("Error: " + stdout));
      } else {
        fs.chmodSync('./uploads/' + video, '644');
        Video.findOne({ _id: req.params.videoId }, function (err, video) {
          if (err) return next(new restify.ConflictError(err));
          if (!video) {
            newVideo = new Video({
              _id: req.params.videoId,
              file: video
            });
            newVideo.save();
          } else {
            video.file = video;
            video.increment();
            video.save();
          }
        });
      }
    });
  });

  req.on('error', function (err) {
    return next(new restify.NetworkConnectTimeoutError(err));
  });
};
So given this controller, which receives a stream (a binary file) and puts it back together on the backend, how would I test it with Mocha?
You could just use http for that:
it('should be possible to upload a file', function (done) {
  var http = require('http');

  var options = require('url').parse(YOUR_URL);
  options.method = 'POST';

  var req = http.request(options, function (response) {
    // TODO: check for errors, correct response, etc...
    done();
  });

  require('fs').createReadStream(YOUR_TEST_FILE).pipe(req);
});
You want to use the request module from within mocha. It supports multi-part forms.
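Since the endpoint expects the raw file as the request body rather than a multipart form, here is a sketch of a Mocha test that pipes a file straight into request.post (YOUR_URL and YOUR_TEST_FILE are placeholders, as in the previous answer):
var request = require('request');
var fs = require('fs');

it('uploads the file as a raw stream', function (done) {
  fs.createReadStream(YOUR_TEST_FILE)
    .pipe(request.post(YOUR_URL, function (err, response, body) {
      if (err) return done(err);
      // TODO: assert on response.statusCode / body
      done();
    }));
});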
I have a problem getting a .json file in Express and displaying it in a view. Kindly share your examples.
var fs = require("fs"),
    json;

function readJsonFileSync(filepath, encoding) {
  if (typeof (encoding) == 'undefined') {
    encoding = 'utf8';
  }
  var file = fs.readFileSync(filepath, encoding);
  return JSON.parse(file);
}

function getConfig(file) {
  var filepath = __dirname + '/' + file;
  return readJsonFileSync(filepath);
}

// assume that config.json is in the application root
json = getConfig('config.json');
Do something like this in your controller.
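Since the question also asks about displaying the data in a view, here is a minimal sketch of an Express route that passes the parsed JSON to a template (the route path and view name are just examples):
app.get('/config', function (req, res) {
  // reads and parses config.json via the helper above, then hands it to the view
  res.render('config', { config: getConfig('config.json') });
});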
To get the JSON file's content:
ES5
var foo = require('./path/to/your/file.json');
ES6
import foo from './path/to/your/file.json';
To send the json to your view:
function getJson(req, res, next) {
  res.send(foo);
}
This should send the json content to your view via a request.
NOTE
According to BTMPL
While this will work, do take note that require calls are cached and will return the same object on each subsequent call. Any change you make to the .json file when the server is running will not be reflected in subsequent responses from the server.
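If you need edits to the .json file to show up while the server is running, one way around the require cache is to re-read the file on each request; a minimal sketch (the file path is an example):
var fs = require('fs');

function getJson(req, res, next) {
  fs.readFile('./path/to/your/file.json', 'utf8', function (err, data) {
    if (err) return next(err);
    res.send(JSON.parse(data));   // fresh content on every request
  });
}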
This one worked for me, using the fs module:
var fs = require('fs');

function readJSONFile(filename, callback) {
  fs.readFile(filename, function (err, data) {
    if (err) {
      callback(err);
      return;
    }
    try {
      callback(null, JSON.parse(data));
    } catch (exception) {
      callback(exception);
    }
  });
}
Usage:
readJSONFile('../../data.json', function (err, json) {
  if (err) { throw err; }
  console.log(json);
});