How can I use fs.read() in Node.js?

I am trying to use the Node.js fs.read method on Mac OS, but it doesn't work.
This is my source code:
var fs = require('fs');
fs.open('helloworld.txt', 'r', function(err, fd) {
    fs.fstat(fd, function(err, stats) {
        var bufferSize = stats.size,
            chunkSize = 512,
            buffer = new Buffer(bufferSize),
            bytesRead = 0;
        while (bytesRead < bufferSize) {
            if ((bytesRead + chunkSize) > bufferSize) {
                chunkSize = (bufferSize - bytesRead);
            }
            fs.read(fd, buffer, bytesRead, chunkSize, bytesRead, testCallback);
            bytesRead += chunkSize;
        }
        console.log(buffer.toString('utf8'));
    });
    fs.close(fd);
});
var testCallback = function(err, bytesRead, buffer) {
    console.log('err : ' + err);
};
It is based on an example from Stack Overflow. When I execute it, this error is returned:
err : Error: EBADF, read
However, if I use the readFile method instead, it works fine:
fs.readFile('helloworld.txt', function (err, data) {
    if (err) throw err;
    console.log(data.toString('utf8'));
});
The result is:
Hello World!
It is, of course, the same file. Please let me know what the problem is. Thank you.

The difference is not in the functions you are using, but in the way you are using them.
All of those fs.* functions are asynchronous: they return immediately and complete later, so the calls overlap instead of running one after another. By the time fs.close runs, the other operations have not finished yet.
You should close the file inside the fs.fstat callback instead:
var fs = require('fs');
fs.open('helloworld.txt', 'r', function(err, fd) {
    fs.fstat(fd, function(err, stats) {
        var bufferSize = stats.size,
            chunkSize = 512,
            buffer = new Buffer(bufferSize),
            bytesRead = 0;
        while (bytesRead < bufferSize) {
            if ((bytesRead + chunkSize) > bufferSize) {
                chunkSize = (bufferSize - bytesRead);
            }
            fs.read(fd, buffer, bytesRead, chunkSize, bytesRead, testCallback);
            bytesRead += chunkSize;
        }
        console.log(buffer.toString('utf8'));
        fs.close(fd);
    });
});
var testCallback = function(err, bytesRead, buffer) {
    console.log('err : ' + err);
};
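As an aside, on Node 10 and later the same chunked read can be written with the promise-based fs API, which makes the ordering explicit. A minimal sketch, assuming fs.promises is available (the file name is the one from the question):
const fsp = require('fs').promises;

async function readInChunks(path, chunkSize = 512) {
    // open returns a FileHandle whose read/stat/close all return promises
    const fh = await fsp.open(path, 'r');
    try {
        const { size } = await fh.stat();
        const buffer = Buffer.alloc(size);
        let bytesRead = 0;
        while (bytesRead < size) {
            const length = Math.min(chunkSize, size - bytesRead);
            // read `length` bytes into `buffer` at offset `bytesRead`,
            // starting from position `bytesRead` in the file
            const result = await fh.read(buffer, bytesRead, length, bytesRead);
            bytesRead += result.bytesRead;
        }
        return buffer;
    } finally {
        // runs only after every read has completed
        await fh.close();
    }
}

readInChunks('helloworld.txt')
    .then(buf => console.log(buf.toString('utf8')))
    .catch(console.error);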

fs.read and fs.fstat are asynchronous functions, and right after calling fstat you are closing the file (fs.close(fd);).
That could be the reason for this error.

The error in your code is that you are using asynchronous functions as if they were sequential. The next statement starts before the previous operation has completed, i.e. fs.close() closes your file before the reads have been performed. You can simply replace them with the synchronous functions provided by the same fs module from Node.js.
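For example, a synchronous version of the loop from the question might look like this (a sketch only; the *Sync calls block the event loop, so this suits scripts rather than servers):
var fs = require('fs');

var fd = fs.openSync('helloworld.txt', 'r');
var fileSize = fs.fstatSync(fd).size;
var buffer = Buffer.alloc(fileSize);
var chunkSize = 512;
var bytesRead = 0;

while (bytesRead < fileSize) {
    if (bytesRead + chunkSize > fileSize) {
        chunkSize = fileSize - bytesRead;
    }
    // readSync returns only after the chunk is in the buffer,
    // so the loop and the final close run strictly in order
    fs.readSync(fd, buffer, bytesRead, chunkSize, bytesRead);
    bytesRead += chunkSize;
}

console.log(buffer.toString('utf8'));
fs.closeSync(fd);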

Related

nodejs - Detect end of file (EOF)

I need to read a file into buffers in Node.js and am using the "old" read() function for this, as I need buffers of a strict size.
I have a loop that reads the file chunk by chunk. But how can I detect that the end of file (EOF) has been reached?
My code is:
do {
    fs.read(fd, buf1, 0, CHUNKSIZE, null, (err, bytesRead, buffer) => {
        console.log("Bytes read: " + bytesRead);
        console.log("Buffer: " + buffer);
    });
} while (!EOF);
But how can I determine EOF?
The end of the file is reached when bytesRead is less than chunkSize; be sure to convert the buffer to a string using bytesRead, or you may end up with junk characters from the unused part of the buffer.
function readAll(fd, chunkSize, callback) {
    fs.read(fd, new Buffer(chunkSize), 0, chunkSize, null, (err, bytesRead, buffer) => {
        if (err) {
            return callback(err);
        }
        console.log(buffer.toString('utf8', 0, bytesRead));
        if (chunkSize > bytesRead) {
            callback();
        } else {
            readAll(fd, chunkSize, callback);
        }
    });
}
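It could be called like this (a hypothetical usage sketch; 'helloworld.txt' stands in for your file):
fs.open('helloworld.txt', 'r', (err, fd) => {
    if (err) throw err;
    readAll(fd, 512, (err) => {
        if (err) console.error(err);
        // all chunks have been logged; now the descriptor can be closed
        fs.close(fd, () => {});
    });
});
Because the position argument is null, each read continues from the file descriptor's current position, which is what makes the sequential recursion work.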

Node.js synchronous forEach easy example?

I want to handle a Node.js asynchronous issue. Please help me with a simple example; a callback-based approach would be best for me.
Thanks
Some examples:
forEach loop
var fs = require('fs');
var paths = ['/home', '/root', '/var'];
paths.forEach(function(path) {
    fs.lstat(path, function(err, stat) {
        console.log(path, stat);
    });
});
For loop
for (var i = 0, c = paths.length; i < c; i++) {
    // creating an Immediately Invoked Function Expression
    (function(path) {
        fs.lstat(path, function(error, stat) {
            console.log(path, stat);
        });
    })(paths[i]);
    // passing paths[i] in as "path" in the closure
}
Recursion
function iteratePath(paths, i, max) {
    if (i >= max) {
        return;
    }
    fs.lstat(paths[i], function(err, stat) {
        console.log(paths[i], stat);
        // recursive call from inside the callback keeps the iterations sequential
        iteratePath(paths, i + 1, max);
    });
}
var paths = ['/home', '/root', '/var'];
iteratePath(paths, 0, paths.length);
You can also use the async library.
See also: Using async/await with a forEach loop.
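A minimal sketch of that last approach, assuming Node 10+ where fs.promises is available; a plain for...of with await keeps the iterations strictly sequential:
const fsp = require('fs').promises;

async function statAll(paths) {
    for (const path of paths) {
        // each lstat finishes before the next one starts
        const stat = await fsp.lstat(path);
        console.log(path, stat);
    }
}

statAll(['/home', '/root', '/var']).catch(console.error);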

Node Async ReadStream from SFTP connection

So I'm creating a class and ultimately want to create a method that takes a file on an SFTP server and produces a readstream that can be piped into other streams / functions. I'm most of the way there, except my readStream is acting strangely. Here's the relevant code:
const Client = require('ssh2').Client,
    Readable = require('stream').Readable,
    async = require('async');
/**
 * Class Definition stuff
 * ......
 */
getStream(get) {
    const self = this;
    const rs = new Readable;
    rs._read = function() {
        const read = this;
        self.conn.on('ready', function() {
            self.conn.sftp(function(err, sftp) {
                if (err) return err;
                sftp.open(get, 'r', function(err, fd) {
                    sftp.fstat(fd, function(err, stats) {
                        let bufferSize = stats.size,
                            chunkSize = 512, // bytes
                            buffer = new Buffer(bufferSize),
                            bytesRead = 0;
                        async.whilst(
                            function () {
                                return bytesRead < bufferSize;
                            },
                            function (done) {
                                sftp.read(fd, buffer, bytesRead, chunkSize, bytesRead,
                                    function (err, bytes, buff) {
                                        if (err) return done(err);
                                        // console.log(buff.toString('utf8'));
                                        read.push(buff);
                                        bytesRead += bytes;
                                        done();
                                    });
                            },
                            function (err) {
                                if (err) console.log(err);
                                read.push(null);
                                sftp.close(fd);
                            }
                        );
                    });
                });
            });
        }).connect(self.connectionObj);
    };
    return rs;
}
Elsewhere, I would call this method like so:
let sftp = new SFTP(credentials);
sftp.getStream('/path/file.csv')
    .pipe(toStuff)
    .pipe(toOutput);
So, long story short: during the SFTP.read operation, read.push(buff) keeps pushing the same first part of the file over and over, yet when I console.log(buff) it correctly shows the full file being streamed.
So I'm scratching my head wondering what I'm doing wrong with the read stream, since it only pushes the beginning of the file and never continues on to the next part of the buffer.
Here's the docs on SSH2 SFTP client: https://github.com/mscdex/ssh2-streams/blob/master/SFTPStream.md
I used this SO question as inspiration for what I wrote above: node.js fs.read() example
This is similar/related: Reading file from SFTP server using Node.js and SSH2
OK, after a lot of trouble, I realized I was making a couple of mistakes. First, the _read function is called every time the stream is ready to read more data, which means the SFTP connection was being started every time _read was called. This also meant the sftp.read() function was starting over each time, resetting the starting point back to the beginning.
I needed a way to first set up the connection, then read and stream the file data, so I chose the noms library. Here's the final code if anyone is interested:
// at the top of the module: const nom = require('noms');
getStream(get) {
    const self = this;
    let connection,
        fileData,
        buffer,
        totalBytes = 0,
        bytesRead = 0;
    return nom(
        // _read function
        function(size, next) {
            const read = this;
            // Check if we're done reading
            if (bytesRead === totalBytes) {
                connection.close(fileData);
                connection.end();
                self.conn.end();
                console.log('done');
                return read.push(null);
            }
            // Make sure we read the last bit of the file
            if ((bytesRead + size) > totalBytes) {
                size = (totalBytes - bytesRead);
            }
            // Read each chunk of the file
            connection.read(fileData, buffer, bytesRead, size, bytesRead,
                function (err, byteCount, buff, pos) {
                    // console.log(buff.toString('utf8'));
                    // console.log('reading');
                    bytesRead += byteCount;
                    read.push(buff);
                    next();
                }
            );
        },
        // Before function
        function(start) {
            // set up the connection BEFORE we start _read
            self.conn.on('ready', function() {
                self.conn.sftp(function(err, sftp) {
                    if (err) return err;
                    sftp.open(get, 'r', function(err, fd) {
                        sftp.fstat(fd, function(err, stats) {
                            connection = sftp;
                            fileData = fd;
                            totalBytes = stats.size;
                            buffer = new Buffer(totalBytes);
                            console.log('made connection');
                            start();
                        });
                    });
                });
            }).connect(self.connectionObj);
        }
    );
}
Always looking for feedback. This doesn't run quite as fast as I'd hoped, so let me know if you have ideas on speeding up the stream.

fs.read has a different behaviour

Any idea why fs.read cannot behave like fs.readSync?
My code is very simple: it just reads a song file out chunk by chunk. With the fs.readSync function the file is read out 512 bytes at a time, while with the fs.read function no log info is printed at all, and if I delete the while (readPostion < fileSize) loop, it executes only once.
var chunkSize = 512; // the chunk size that will be read every time
var readPostion = 0; // the first byte which will be read from the file
var fileSize = 0;
var fs = require('fs');
//var Buffer = require("buffer");
//var songsBuf = Buffer.alloc(512);
var songsBuf = new Buffer(chunkSize);
fs.open('/media/sdcard/song.mp3', 'r', function(err, fd) {
    if (err)
        throw err;
    console.log("The file has been opened");
    var fileSize = fs.fstatSync(fd).size;
    console.log("The total size of this file is:%d Bytes", fileSize);
    console.log("Start to read the file chunk by chunk");
    // read the file in sync mode
    while (readPostion < fileSize) {
        fs.readSync(fd, songsBuf, 0, chunkSize, readPostion);
        if (readPostion + chunkSize > fileSize)
            chunkSize = fileSize - readPostion;
        readPostion += chunkSize;
        console.log("the read position is %d", readPostion);
        console.log("The chunk size is %d", chunkSize);
        console.log(songsBuf);
    }
    // the code above can read out the file chunk by chunk but the one below cannot
    // read the file in async mode
    while (readPostion < fileSize) {
        // console.log("ff");
        fs.read(fd, songsBuf, 0, chunkSize, 1, function(err, byteNum, buffer) {
            if (err)
                throw err;
            console.log("Start to read from %d byte", readPostion);
            console.log("Total bytes are %d", byteNum);
            console.log(buffer);
            if (readPostion + chunkSize > fileSize)
                chunkSize = fileSize - readPostion; // if what is left to read is smaller than one chunk
            readPostion += chunkSize;
        });
    }
    fs.close(fd);
});
You can do this with the async library.
async.whilst(
    function () { return readPostion < fileSize; },
    function (callback) {
        // read the next chunk from the current read position
        fs.read(fd, songsBuf, 0, chunkSize, readPostion, function (err, byteNum, buffer) {
            if (err) return callback(err);
            console.log("Start to read from %d byte", readPostion);
            console.log("Total bytes are %d", byteNum);
            console.log(buffer);
            if (readPostion + chunkSize > fileSize)
                chunkSize = fileSize - readPostion; // if what is left to read is smaller than one chunk
            readPostion += chunkSize;
            callback(null, songsBuf);
        });
    },
    function (err, n) {
        if (err) console.error(err);
        fs.close(fd);
        // Do something with songsBuf here
    }
);
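For context, here is a minimal skeleton of how the pieces fit together; it assumes the async v2 API (synchronous test function, as above), with the whilst call replacing the second while loop inside the fs.open callback:
var fs = require('fs');
var async = require('async');

var chunkSize = 512;
var position = 0;
var songsBuf = Buffer.alloc(chunkSize);

fs.open('/media/sdcard/song.mp3', 'r', function (err, fd) {
    if (err) throw err;
    var fileSize = fs.fstatSync(fd).size;
    async.whilst(
        function () { return position < fileSize; },
        function (callback) {
            fs.read(fd, songsBuf, 0, chunkSize, position, function (err, byteNum) {
                if (err) return callback(err);
                console.log(songsBuf.slice(0, byteNum));
                // advance by the number of bytes actually read
                position += byteNum;
                callback(null);
            });
        },
        function (err) {
            if (err) console.error(err);
            // close only after the loop has drained
            fs.close(fd, function () {});
        }
    );
});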

Read a file one character at a time in node.js?

Is there a way in Node.js to read one symbol at a time from a file, without storing the whole file in memory?
I found an answer for lines.
I tried something like this, but it doesn't help:
const stream = fs.createReadStream("walmart.dump", {
    encoding: 'utf8',
    fd: null,
    bufferSize: 1,
});
stream.on('data', function(sym) {
    console.log(sym);
});
A readable stream has a read() method to which you can pass the length, in bytes, of every chunk to be read. For example:
var readable = fs.createReadStream("walmart.dump", {
    encoding: 'utf8',
    fd: null,
});
readable.on('readable', function() {
    var chunk;
    while (null !== (chunk = readable.read(1) /* here */)) {
        console.log(chunk); // chunk is one byte
    }
});
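On newer Node versions (10+) a readable stream is also an async iterable, so the same effect can be had without the 'readable' event; a minimal sketch:
const fs = require('fs');

async function printByChar(path) {
    const readable = fs.createReadStream(path, { encoding: 'utf8' });
    for await (const chunk of readable) {
        // walk the decoded chunk one character at a time
        for (const ch of chunk) {
            console.log(ch);
        }
    }
}

printByChar('walmart.dump').catch(console.error);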
Here's a lower-level way to do it, using fs.read(fd, buffer, offset, length, position, callback):
const fs = require('fs');
// open file for reading, returns a file descriptor
const fd = fs.openSync('your-file.txt', 'r');

function readOneCharFromFile(position, cb) {
    // only need to store one byte (one character)
    const b = new Buffer(1);
    fs.read(fd, b, 0, 1, position, function(err, bytesRead, buffer) {
        console.log('data => ', String(buffer));
        cb(err, buffer);
    });
}
You will have to increment the position as you read the file, but it will work.
Here's a quick example of how to read a whole file, character by character.
Just for fun, I wrote this complete script to do it; pass in a different file path and it should work:
const async = require('async');
const fs = require('fs');
const path = require('path');

function read(fd, position, cb) {
    let isByteRead = null;
    let ret = new Buffer(0);
    async.whilst(
        function () {
            return isByteRead !== false;
        },
        function (cb) {
            readOneCharFromFile(fd, position++, function (err, bytesRead, buffer) {
                if (err) {
                    return cb(err);
                }
                isByteRead = !!bytesRead;
                if (isByteRead) {
                    ret = Buffer.concat([ret, buffer]);
                }
                cb(null);
            });
        },
        function (err) {
            cb(err, ret);
        }
    );
}

function readOneCharFromFile(fd, position, cb) {
    // only need to store one byte (one character)
    const b = new Buffer(1);
    fs.read(fd, b, 0, 1, position, cb);
}

/// use your own file here
const file = path.resolve(__dirname + '/fixtures/abc.txt');
const fd = fs.openSync(file, 'r');

// start reading at position 0; position will be incremented
read(fd, 0, function (err, data) {
    if (err) {
        console.error(err.stack || err);
    }
    else {
        console.log('data => ', String(data));
    }
    fs.closeSync(fd);
});
As you can see, we increment the position integer every time we read the file. Hopefully the OS keeps the file cached as we go. Using async.whilst() is OK, but I think a more functional style would avoid keeping the state at the top of the function (ret and isByteRead). I will leave it as an exercise to the reader to implement this without those stateful variables.
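For what it's worth, one minimal sketch of that exercise, threading the accumulator through the recursion instead of mutating outer variables (it reuses readOneCharFromFile from the script above):
function readAllChars(fd, position, acc, cb) {
    readOneCharFromFile(fd, position, function (err, bytesRead, buffer) {
        if (err) return cb(err);
        if (!bytesRead) return cb(null, acc); // EOF: nothing left to read
        readAllChars(fd, position + 1, Buffer.concat([acc, buffer]), cb);
    });
}

// usage, mirroring the script above:
// readAllChars(fd, 0, new Buffer(0), function (err, data) { ... });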
