Javascript: Write and complete an existing file - node.js

I am trying to write some Node.js code that appends to an existing file, but each time I start the script it overwrites the content of my test file.
var fs = require('fs');

// Open the stream in append mode ('a') so existing content is preserved.
// Bug fix: flags 'w' truncates the file on every run, which is exactly
// why the script kept overwriting test.txt.
var writer = fs.createWriteStream('test.txt', {flags: 'a'});

// 'finish' fires once end() has been called and all queued data is flushed.
writer.on('finish', function () {
  console.log('data has been saved successfully');
});

// Append one id per line to the stream.
function writeInList(id) {
  console.log(id);
  writer.write(id + ' \n');
}

for (var id = 0; id < 10; id++) {
  writeInList(id);
}

// Signal that no more data will be written; triggers 'finish' above.
writer.end();
Of course I have searched for a solution:
Writing large files with Node.js
But I'm not able to make it run. Could anybody help me please?

Related

Deleting file in node.js not working

I am using Node.js with Express.
I am trying to delete a file after sending it to client with express js.
/**
 * Asynchronously remove a file from disk, logging (but not rethrowing)
 * any unlink error.
 * @param {string} file - path of the file to delete
 */
function deleteFile(file) {
  fs.unlink(file, function (unlinkErr) {
    if (!unlinkErr) {
      return;
    }
    logger.error(unlinkErr);
  });
}
// Streams a.pdf to the client and attempts to delete it once the request ends.
app.get("/deleteFileAfterDownload", function (req, res){
var fileName = "a.pdf"
var stream = fs.createReadStream(fileName);
// guard so the delete only happens once even if both handlers run
var streamClosed = false;
req.on('end',function(){
if (!streamClosed){
// NOTE(review): emitting 'close' manually only runs the listeners below;
// it does not actually release the underlying file descriptor — presumably
// why the file stays "in use" until the process exits. Confirm against the
// res 'finish' approach in the answer.
stream.emit('close');
// I tried stream.destroy() but that is also not working
}
});
stream.on('close', function () {
streamClosed = true;
deleteFile(fileName);
});
// drain any request body so 'end' can fire
req.on('data', function(){});
stream.pipe(res);
});
But the file is not getting deleted. it seems the process is still using file because just after I end the process, the file is getting deleted.
Can anybody tell me why?
If I am doing it wrong, please tell me a good way.
Please add a log statement inside deleteFile to make sure it is actually being called.
Try simplify it:
var fileName = "a.pdf"
var stream = fs.createReadStream(fileName);
stream.pipe(res);
// 'finish' fires once the response has been fully flushed to the client,
// so the file is no longer needed and can be deleted safely.
res.once("finish", function () {
deleteFile(fileName);
});
The previous example only deletes the file if the download finished;
if you want to delete the file unconditionally, try the following:
var fileName = "a.pdf";
var stream = fs.createReadStream(fileName);
// 'close' on the destination fires even when the client aborts the
// transfer, so the delete happens unconditionally.
stream.pipe(res).once("close", function () {
// release the source descriptor ourselves; it is not closed automatically
// when a pipe is aborted
stream.close();
deleteFile(fileName);
});
stream.close() is important here, because the source stream is not closed automatically if the pipe is aborted.

How to Synchronize the file writes in Node.Js

I am using the EJS compile to create notification templates and I would like to know how to write the file to the file system in parallel and send the notification once all the files are saved.
Please see the below code snippet which I used
var fs = require('fs');
var ejs = require('ejs');
var arrayOfData = [someData]; //Prepare data from database
//Iterate through the data
// NOTE(review): the empty callback means nothing observes when (or whether)
// each file finished writing, so there is no point at which a notification
// can safely be sent — this is the root of the synchronization question.
for (var i = 0; i < arrayOfData.length; i++) {
generateFileFromTemplate(arrayOfData[i],function(){});
}
/**
 * Render the e-mail template with the given data and write the result
 * to "<Id>.html".
 * @param {Object} templateData - template data; must carry an `Id` used
 *   to name the output file.
 * @param {Function} callback - forwarded to fs.writeFile; invoked with
 *   (err) once the file has been written.
 */
function generateFileFromTemplate(templateData, callback)
{
  // Bug fix: the original read `fileData.Id`, but no `fileData` exists in
  // scope (the parameter is `templateData`), so this threw a ReferenceError.
  var outputFile = templateData.Id + ".html";
  var compiled = ejs.compile(fs.readFileSync('email-template.ejs', 'utf8'));
  var html = compiled(templateData);
  fs.writeFile(outputFile, html, callback);
}
Please help.
Use async.each for your use case
// Run generateFileFromTemplate once per element; async.each only invokes
// the final callback after every iterator has called `next`.
async.each(arrayOfData,
  function (ele, next) {
    // Bug fix: the original passed an empty function and discarded `next`,
    // so async.each considered each item "done" without ever waiting for —
    // or hearing about errors from — the actual file write.
    generateFileFromTemplate(ele, next);
  },
  function (err) {
    if (err) console.log('err', err);
    // all files are on disk (or an error was reported) by this point
    sendNotification();
  }
);
You can use a great utility library called Async, particularly its parallel method: https://github.com/caolan/async#parallel.
Here's an example:
var async = require('async');
/*-------------*/
// Build one task per record; async.parallel runs them all concurrently and
// fires its final callback once every task has invoked its `cb`.
var tasks = arrayOfData.map(function (data) {
  return function (cb) {
    // Bug fix: the original called cb(null) synchronously, signalling
    // completion before fs.writeFile had finished (or failed). Hand the
    // task callback to generateFileFromTemplate so completion is reported
    // only when the write really ends.
    generateFileFromTemplate(data, cb);
  };
});
async.parallel(tasks, function (err) {
  if (err) console.log('err', err);
  console.log('My job is done');
})

Read newest csv records with Node

I'd like to watch a CSV file and get the newest records since it was changed. I'm running the following shell command to build a very simple csv file and append a new line every second:
rm test.csv & x=0 && while true; do echo "${x},${x},${x}" >> test.csv; x=$(($x+1)); sleep 1; done
The following code prints all the records of the file until the first change and then just emits the dashed line, as if it's not re-reading the file:
'use strict';
var fs = require('fs'),
    dataFile = __dirname + '/server/data/test.csv',
    csv = require('csv');

var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);

// Re-read and re-parse the whole file on every change event.
function fileChange(eventType, filename) {
  // NOTE: fs.watch listeners receive (eventType, filename) — the first
  // argument is NOT an error, so the original `if (e) console.error(e)`
  // printed the string 'change' on every event.

  // Bug fix: the original created ONE parser stream up front and piped
  // into it on every change. A csv.parse() stream ends after the first
  // pipe completes, so every later change produced no output. Create a
  // fresh parser per change instead.
  var parser = csv.parse({delimiter: ','}, function (err, data) {
    if (err) return console.error(err);
    console.log(data);
  });
  fs.createReadStream(dataFile).pipe(parser);
  console.log('-------');
}
Shouldn't the fileChange function re-read the file on every change? My ultimate plan here is to get both the previous array of lines and the current one and use lodash's difference function to return only the differences. If there's better way, I'm open to hear it though.
My guess is that fs.createReadStream() has opened the file and it's not being closed. So on the second event fs.createReadStream() fails. No bueno.
Try using fs.readFile() instead like this:
// Re-read the whole file on each change; unlike a reused stream/parser,
// fs.readFile works repeatedly.
function fileChange(e, fn) {
  if (e) console.error(e);
  // Improvement: pass an encoding so `data` arrives as a string — without
  // it, console.log(data) prints a raw <Buffer ...> byte dump.
  fs.readFile(dataFile, 'utf8', function (err, data) {
    if (err) throw err;
    console.log(data);
    console.log('-------');
  });
}
See the documentation here: http://nodejs.org/api/fs.html#fs_fs_readfile_filename_options_callback
I ended up solving the issue by stating the file on change, and reading the difference in size to the stream data:
'use strict';
var fs = require('fs'),
    dataFile = __dirname + '/server/data/test.csv',
    readSize = 0,
    csv = require('csv');

// One long-lived parser; we push only the newly appended bytes into it,
// so it never ends and keeps emitting records.
var parser = csv.parse();
parser.on('readable', function () {
  var record;
  while ((record = parser.read())) {
    console.log(record);
  }
});

var watcher = fs.watch(dataFile);
watcher.on('change', fileChange);

// fires when the watched file changes
function fileChange(e, fn) {
  // get the current size synchronously
  var stats = fs.statSync(dataFile);
  // if the file has not grown (e.g. it was truncated), retry shortly.
  // Bug fix: the original fell through here without returning and still
  // opened a read stream with start >= end; return so we only reschedule.
  if (stats.size <= readSize) {
    setTimeout(fileChange, 500);
    return;
  }
  // stream just the byte range appended since the last read into the parser
  var stream = fs.createReadStream(dataFile, {start: readSize, end: stats.size});
  stream.on('data', function (chunk) {
    parser.write(chunk.toString());
  });
  readSize = stats.size;
}
Any feedback on why this may not work would be appreciated.

How to read file to variable in NodeJs?

I'm pretty new to Node.js, and I am trying to read a file into a variable.
Here is my code.
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
console.log(content);
fs.readFile(path.join(__dirname,"helpers","test.txt"), 'utf8',function (err,data) {
if (err) {
console.log(err);
process.exit(1);
}
content = util.format(data,"test","test","test");
});
console.log(content);
But every time i run the script i get
undefined and undefined
What am i missing? Help please!
As stated in the comments under your question, node is asynchronous - meaning that your function has not completed execution when your second console.log function is called.
If you move the log statement inside the the callback after reading the file, you should see the contents outputted:
var fs = require("fs"),
path = require("path"),
util = require("util");
var content;
// still logs `undefined` — nothing has been assigned at this point
console.log(content);
fs.readFile(path.join(__dirname, "helpers", "test.txt"), 'utf8', function (err, data) {
if (err) {
console.log(err);
process.exit(1);
}
content = util.format(data, "test", "test", "test");
// logging inside the callback guarantees the read has completed
console.log(content);
});
Even though this will solve your immediate problem, without an understanding of the async nature of Node you're going to encounter a lot of issues.
This similar stackoverflow answer goes into more details of what other alternatives are available.
The following code snippet uses ReadStream. It reads your data in separated chunks, if your data file is small it will read the data in a single chunk. However this is a asynchronous task. So if you want to perform any task with your data, you need to include them within the ReadStream portion.
var fs = require('fs');
var readStream = fs.createReadStream(__dirname + '/readMe.txt', 'utf8');
/* include the file directory and file name instead of <__dirname + '/readMe.txt'> */
var content = '';
// Bug fix: the original did `content = chunk`, which keeps only the LAST
// chunk when a larger file arrives in several chunks, and it ran the task
// once per chunk. Accumulate every chunk and run the task exactly once
// when the whole file has been read.
readStream.on('data', function (chunk) {
  content += chunk;
});
readStream.on('end', function () {
  performTask();
});
// Uses the accumulated file content; only valid after 'end' has fired.
function performTask() {
  console.log(content);
}
There is also another easy way by using synchronous task. As this is a synchronous task, you do not need to worry about its executions. The program will only move to the next line after execution of the current line unlike the asynchronous task.
A more clear and detailed answer is provided in the following link:
Get data from fs.readFile
var fs = require('fs');
// Synchronous read: blocks until the whole file is loaded, then returns
// its contents as a string (because an encoding is supplied).
var content = fs.readFileSync('readMe.txt','utf8');
/* include your file name instead of <'readMe.txt'> and make sure the file is in the same directory. */
or easily as follows:
const fs = require('fs');
// Third-party helper that promisifies callback-style APIs.
// NOTE(review): modern Node offers fs.promises.readFile built in.
const doAsync = require('doasync');
doAsync(fs).readFile('./file.txt')
.then((data) => console.log(data));

Save a image using nodejs, expressjs and socket.io

I've tried to save an image to a specified directory with Node.js using Express.js and socket.io, but it doesn't work.
On the client-side:
// Handles a drag-and-drop event: reads every dropped file and emits its
// content to the server over the 'sendfile' socket event.
function drop(e) {
  e.stopPropagation();
  e.preventDefault();
  var dt = e.dataTransfer;
  var files = dt.files;
  jQuery.each(files, function () {
    // Bug fix: the original installed reader.onload but never started a
    // read, so onload never fired and nothing was ever emitted. Also, a
    // single shared FileReader cannot service several overlapping reads —
    // use a fresh reader per file and actually kick off the read.
    var file = this;
    var reader = new FileReader();
    reader.onload = function (e) {
      socket.emit('sendfile', e.target.result);
    };
    reader.readAsBinaryString(file);
  });
  return false;
}
The image should be uploaded by a drag and drop function.
Then on the server-side:
io.sockets.on('connection', function (socket) {
[...]
socket.on('sendfile', function (data) {
var fs = require('fs');
// NOTE(review): registering Express middleware inside a socket handler
// re-adds it on every message, and it has no effect on socket.io uploads —
// bodyParser only processes HTTP multipart requests, never socket events.
app.use(express.bodyParser({ keepExtensions: true, uploadDir: '/uploaded' }));
io.sockets.emit('updatechat', socket.username, data); //test
});
I have also tried
socket.on('sendfile', function (data) {
var fs = require('fs');
// NOTE(review): '/uploaded/test.png' is an absolute path at the filesystem
// root — presumably __dirname + '/uploaded/test.png' was intended, and
// fs.writeFile does not create missing directories. Also, if `data` is a
// FileReader result it may be a data-URL string, in which case the
// 'data:...;base64,' prefix must be stripped and 'base64' used as the
// encoding rather than 'binary' — confirm what the client emits.
fs.writeFile('/uploaded/test.png', data, "binary" , function (err) {
if (err) throw err;
console.log('It\'s saved!');
});
io.sockets.emit('updatechat', socket.username, data); //data test
});
but it doesn't save anything.
The "data test" shows me that the data has already arrived on the server, so I don't think the problem comes from the client side; but on the server side I have no idea what I am doing wrong.
I made a simple example to illustrate the usage of file upload via socket!
The steps following are:
Create the send-file socket.io event to receive the file on app.js. This file received is a binary one;
In the jade/HTML page put an input file and a button to send it. NOTE: you don't have to use multipart to send a post with multipart content, we are sending socket files not a TCP request/response;
Initialize HTML5 File API support and prepare the listeners to watching out your file input component;
The rest of remaining routines to read the file and sent it content forward.
Now first step (app.js):
var io = require('socket.io').listen(8000, {log: false});
io.sockets.on('connection', function(socket) {
// Receives (fileName, binaryContent) pairs emitted by the client uploader.
socket.on('send-file', function(name, buffer) {
var fs = require('fs');
//path to store uploaded files (NOTE: presumed you have created the folders)
// NOTE(review): `name` comes straight from the client — sanitize it (e.g.
// path.basename) before use, or a crafted name could escape the upload dir.
var fileName = __dirname + '/tmp/uploads/' + name;
// 'a' appends; NOTE(review): 0755 is a legacy octal literal that is a
// syntax error in strict mode — modern code would write 0o755.
fs.open(fileName, 'a', 0755, function(err, fd) {
if (err) throw err;
// legacy positional fs.write(fd, data, position, encoding, cb) form
fs.write(fd, buffer, null, 'Binary', function(err, written, buff) {
fs.close(fd, function() {
console.log('File saved successful!');
});
})
});
});
});
Second step (in my case I've used jade rather html)
extends layout
//- Page body: a multi-file input plus a Send button wired to sendFile()
//- in /javascripts/uploader.js.
block content
h1 Tiny Uploader
p Save an Image to the Server
input#input-files(type='file', name='files[]', data-url='/upload', multiple)
button#send-file(onclick='javascript:sendFile();') Send
//- socket.io client script served by the app.js listener on port 8000
script(src='http://127.0.0.1:8000/socket.io/socket.io.js')
script(src='/javascripts/uploader.js')
Third and Fourth steps (coding uploader.js to send the file to server)
//variable declaration
// element reference for the #input-files control (set by init)
var filesUpload = null;
// the currently selected File object (set by fileHandler, read by sendFile)
var file = null;
var socket = io.connect('http://localhost:8000');
var send = false;
// Feature-detect the HTML5 File API before wiring anything up.
if (window.File && window.FileReader && window.FileList) {
//HTML5 File API ready
init();
} else {
//browser has no support for HTML5 File API
//send a error message or something like that
//TODO
}
/**
 * Wire up the file input: remember the element and listen for selection
 * changes so fileHandler can capture the chosen file.
 */
function init() {
  var input = document.getElementById('input-files');
  filesUpload = input;
  input.addEventListener('change', fileHandler, false);
}
/**
 * Remember the first file the user selected (or dropped); sendFile()
 * reads it later when the Send button is clicked.
 * @param e change (or drop) event carrying a FileList
 */
function fileHandler(e) {
  var selected = e.target.files || e.dataTransfer.files;
  if (!selected) {
    return;
  }
  // send only the first one
  file = selected[0];
}
/**
 * Read the currently selected file and ship its raw content to the server
 * via the 'send-file' socket event. Does nothing when no file has been
 * chosen yet.
 */
function sendFile() {
  if (!file) {
    return;
  }
  var reader = new FileReader();
  reader.onload = function (evt) {
    console.log('Sending file...');
    // evt.target.result holds the whole file as a binary string
    socket.emit('send-file', file.name, evt.target.result);
  };
  reader.readAsBinaryString(file);
}
Some important considerations:
This is a tiny sample of socket file uploader. I don't consider some important things here: file chunks to send piece of files instead of all content in a row; Update the status of file sent as (error msg, successful msg, progress bar or percent stage, etc.). So this is a sample to initial steps to coding your own file uploader. In this case, we don't need a form to send files, its is completely asynchronous transaction via socket.io.
I hope this post is helpful.
This tutorial goes a little bit further because you can pause/resume your upload but you will find how to upload a file through socketio :)

Resources