NodeJS on BeagleBoard fs.write Error Unknown -1 - node.js

I am trying to create a NodeJS library that allows use of the BeagleBone's serial (UART) ports. Some of the pins are muxed, so configuration bits have to be written to two files. Here is my function that writes the configuration bits to enable the UART:
var setMuxForUart = function (uart, next) {
    var txFd, rxFd;
    var txBuf = new Buffer(uart.muxTx.config, 'ascii');
    var rxBuf = new Buffer(uart.muxRx.config, 'ascii');
    var txBytesWritten, rxBytesWritten;
    console.log("Configuring UART MUX for " + uart.path);
    txFd = fs.openSync(MUX_PATH + uart.muxTx.path, 'w');
    rxFd = fs.openSync(MUX_PATH + uart.muxRx.path, 'w');
    if (txFd && rxFd) {
        try {
            txBytesWritten = fs.writeSync(txFd, txBuf, 0, txBuf.length, 0);
        }
        catch (e) {
            fs.closeSync(txFd);
            fs.closeSync(rxFd);
            console.log('Error Writing to file: ' + MUX_PATH + uart.muxTx.path + ' | ' + util.inspect(e));
            return;
        }
        try {
            rxBytesWritten = fs.writeSync(rxFd, rxBuf, 0, rxBuf.length, 0);
        }
        catch (e) {
            fs.closeSync(txFd);
            fs.closeSync(rxFd);
            console.log('Error Writing to file: ' + MUX_PATH + uart.muxRx.path + ' | ' + util.inspect(e));
            return;
        }
        fs.closeSync(txFd);
        fs.closeSync(rxFd);
        if (txBytesWritten && rxBytesWritten) {
            console.log("Uart MUX finished configuration");
            next();
        }
        else {
            console.log("An error occurred writing to the UART MUX.");
        }
    }
    else {
        console.log("An error occurred while opening the UART MUX files.");
    }
};
Here is the file containing this function.
Here is the output from running this function:
root@beaglebone:~/workspace/BonescriptSerial# node BonescriptSerial.js
The "sys" module is now called "util". It should have a similar interface.
Opening Serial Port for: /dev/ttyO1
Configuring UART MUX for /dev/ttyO1
Error Writing to file: /sys/kernel/debug/omap_mux/uart1_txd | { [Error: UNKNOWN, unknown error] errno: -1, code: 'UNKNOWN', syscall: 'write' }
I have verified that the correct output is written to test files; I have tried numerous mode parameters ('0777' makes no difference); I have tried both the sync and async functions to no avail; and I have successfully written to these files in Python. If you have any ideas that would help solve this problem, I would be very grateful.
Here is a GitHub repo of the project; it's in its infancy right now, so there isn't a lot of documentation. The Python version is in the repo as well.

Thanks to Ben Noordhuis on the NodeJS Google group, I was pointed to the cause of the problem. The device driver I was trying to write to apparently doesn't accept writes at arbitrary seek offsets, so to get around that I needed to trick NodeJS into using write(2) instead of pwrite(2). The trick is to tell the write call to start writing at position -1 instead of 0:
fs.writeSync(txFd, txBuf, 0, txBuf.length, -1);
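For reference, a minimal standalone sketch of the working approach (the helper name and the config value are illustrative; the mux path is taken from the error output above):

var fs = require('fs');

// Hypothetical helper: write a pin-mux config using write(2) semantics
function writeMux(muxFilePath, config) {
    var fd = fs.openSync(muxFilePath, 'w');
    var buf = new Buffer(config, 'ascii');
    try {
        // position -1 makes Node use write(2) instead of pwrite(2),
        // which debugfs entries like omap_mux require
        return fs.writeSync(fd, buf, 0, buf.length, -1);
    } finally {
        fs.closeSync(fd);
    }
}

writeMux('/sys/kernel/debug/omap_mux/uart1_txd', '0'); // '0' is an example config value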

Related

Node.js: Error ECONNRESET while piping a file

I have this error while "piping" a file in node:
events.js:160
throw er; // Unhandled 'error' event
^
Error: read ECONNRESET
at exports._errnoException (util.js:1022:11)
at Pipe.onread (net.js:569:26)
Here is my code:
var json_file = fs.createWriteStream(jsonFile),
    processes = 0,
    read_ended_flag = false,
    converter = new Converter(params);

let read_csv_file = fs.createReadStream(csvFilePath);

// For each csv line, we get a json doc
converter.on('record_parsed', jsonObj => {
    processes++;
    json_file.write(JSON.stringify(jsonObj) + '\n', () => {
        processes--;
        if (read_ended_flag && processes == 0) {
            json_file.end();
            callback();
        }
    });
});

converter.on('end_parsed', () => {
    read_ended_flag = true;
});

read_csv_file
    .pipe(converter);
I tried to catch the error using this or this, but it's still the same.
This bug only appears while working with small files (fewer than ~100 lines).
Is it because the read stream is closed before the writing to the new file finishes?
Many thanks for any tips & help!
I found the solution! :)
It was due to my Converter params (csvtojson module): I had set workerNum to 4, but it has to be 1 when dealing with small files.
workerNum: Number of worker processes. The worker process will use multi-cores to help process CSV data. Set to number of Core to improve the performance of processing large csv file. Keep 1 for small csv files. Default 1.
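Concretely, the fix is a one-line change to the constructor params; a minimal sketch, assuming the csvtojson v1 Converter API used above:

var Converter = require('csvtojson').Converter;

// workerNum: 1 keeps processing in a single process, which is what
// small CSV files need (4 workers caused the ECONNRESET above)
var converter = new Converter({ workerNum: 1 });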
Here is a complete tutorial about the csvtojson module.
Hope it helps others!

Renaming multiple files asynchronously causing error in Node.js

I am trying to rename a bunch of pre-generated testing files (1000+) asynchronously in Node.js.
The code looks like the following:
const fs = require('fs')
const { each } = require('async')

each(files, file => {
    let newfile = 'new' + file
    fs.rename(file, newfile, err => {
        err ? console.log(err) : console.log('renamed')
    })
})
This leads to the following error:
Uncaught Error: ENOENT: no such file or directory, lstat '8d3320e35d22772f'
at fs.lstatSync (fs.js:902:18)
at Object.fs.lstatSync
It's not an async module issue, since replacing each with the native forEach leads to the same error. Also, there are no issues when using the synchronous version of rename, fs.renameSync.
I think it's trying to move some file twice, but I can't figure out where exactly the mistake is. I made this assumption because all the files are renamed successfully, so the error is very likely generated afterward. Can someone advise what is causing this behavior?
My bad. Just in case someone is curious, this error came from the following underlying function:
function rmDir(dir) {
    var list = fs.readdirSync(dir);
    for (var i = 0; i < list.length; i++) {
        var dirOrFile = path.join(dir, list[i]);
        // lstatSync throws ENOENT here when an entry from readdirSync
        // has already been renamed by one of the still-pending async renames
        var stat = fs.lstatSync(dirOrFile);
        if (dirOrFile == "." || dirOrFile == "..") {
            // pass these files
        } else if (stat.isDirectory()) {
            // rmdir recursively
            rmDir(dirOrFile);
        }
        // else { fs.unlinkSync(dirOrFile) } // rm filename
    }
    fs.rmdirSync(dir);
}
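Given that, one way to avoid the race is to defer the directory cleanup until every rename has completed, e.g. via async.each's completion callback; a sketch (files and dir are assumed to exist as in the question):

const fs = require('fs')
const { each } = require('async')

each(files, (file, done) => {
    fs.rename(file, 'new' + file, done)
}, err => {
    if (err) return console.log(err)
    // only now is it safe to walk the directory: no renames are in flight
    rmDir(dir)
})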

NodeJs: why does my WritableStream not close before the end of the program?

I have a NodeJS script that I use to write JSON objects to a file:
var reWriteCodes = function (arrayCodes, pathFileCodes, fs, typereWriteCodes, cb) {
    console.log("message : " + typereWriteCodes);
    var codesStream = fs.createWriteStream(pathFileCodes, { 'flags': 'w' });
    codesStream.on('finish', () => {
        console.log("the stream is closed")
        if (typereWriteCodes === 0) {
            cb();
        }
    });
    var jsonToWrite = { "liste_codes": arrayCodes };
    codesStream.write(JSON.stringify(jsonToWrite));
    codesStream.end();
}
This function is called twice (first with typereWriteCodes = 0, then with typereWriteCodes = 1 inside the callback function cb), with two different files.
The first call ends fine: my file is saved and the message "the stream is closed" displays in the console. But in the second call (which is the last operation of the program), my file is not saved correctly (the file is empty) and the message "the stream is closed" is not triggered. Also, I get this message:
events.js:141
throw er; // Unhandled 'error' event
^
Error: ENOENT: no such file or directory, open ''
at Error (native)
I have the feeling that the app is closing before the stream can end correctly, but I do not know how to handle this properly. Could you help me with this issue? Any help appreciated.
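For reference, the calling pattern being described looks something like this (a sketch; the file paths and the codes array are placeholders):

var fs = require('fs');
var arrayCodes = ['A1', 'B2']; // placeholder data

// second write starts from the first call's 'finish'-driven callback;
// with typereWriteCodes = 1 the second call never invokes its own callback
reWriteCodes(arrayCodes, '/tmp/codes-first.json', fs, 0, function () {
    reWriteCodes(arrayCodes, '/tmp/codes-second.json', fs, 1, function () {});
});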

Is 7zip stdout broken? Is there a way to capture the progress in nodejs? [Windows]

I am trying to capture the stdout of 7zip while it processes files and extract the percentage in NodeJS, but it doesn't behave as expected: 7zip doesn't output anything to stdout until the very end of the execution. That is not very helpful, especially when large files are being compressed and no feedback is shown for a very long time.
The code I am using (simplified):
// 7zip test, place the 7z.exe in the same dir, if it's not on %PATH%
var cp = require('child_process');

var inputFile = process.argv[2];
if (inputFile == null) return;

var regProgress = /(\d{1,3})%\s*$/; // get the last percentage of the string, 3 digits

var proc = cp.spawn("7z.exe", ["a", "-t7z", "-y", inputFile + ".7z", inputFile]);
proc.stdout.setEncoding("utf8");
proc.stdout.on("data", function (data) {
    if (regProgress.test(data))
        console.log("Progress = " + regProgress.exec(data)[1] + "%");
});
proc.once("exit", function (exit, sig) { console.log("Complete"); });
I have used the same code to successfully get the percentage with WinRAR, and I am beginning to think that 7zip might be buggy. Or am I doing it wrong? Can I forcefully read the stdout of a process with a timer, perhaps?
The same code above works as expected with WinRAR, with just the following line replaced:
var proc = cp.spawn("Rar.exe", ["a", "-s", "-ma5", "-o+", inputFile + ".rar", inputFile]);
If anyone knows why this happens and whether it is fixable, I would be grateful! :-)
P.S. I have tried 7za.exe, the command-line version of 7zip, in the stable, beta, and alpha versions; they all have the same issue.
It is no longer necessary to use a terminal emulator like pty.js: you can pass the -bsp1 switch to 7z to force it to output progress to stdout.
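With that switch, the plain child_process approach from the question should work; a sketch (assumes a 7-Zip version new enough to support -bsp1):

var cp = require('child_process');
var inputFile = process.argv[2];
var regProgress = /(\d{1,3})%\s*$/;

// -bsp1 redirects the progress indicator to stdout
var proc = cp.spawn("7z.exe", ["a", "-t7z", "-y", "-bsp1", inputFile + ".7z", inputFile]);
proc.stdout.setEncoding("utf8");
proc.stdout.on("data", function (data) {
    if (regProgress.test(data))
        console.log("Progress = " + regProgress.exec(data)[1] + "%");
});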
7-zip only outputs progress when stdout is a terminal.
To trick 7-zip, you need to npm install pty.js (requires Visual Studio or VS Express with Windows SDK) and then use code like:
var pty = require('pty.js');

var inputFile = process.argv[2],
    pathTo7zip = 'c:\\Program Files\\7-Zip\\7z.exe';
if (inputFile == null)
    return;

var term = pty.spawn(process.env.ComSpec, [], {
    name: 'ansi',
    cols: 200,
    rows: 30,
    cwd: process.env.HOME,
    env: process.env
});

var rePrg = /(\d{1,3})%\r\n?/g,
    reEsc = /\u001b\[\w{2}/g,
    reCwd = new RegExp('^' + process.cwd().replace(/\\/g, '\\\\'), 'm'),
    prompts = 0,
    buffer = '';

term.on('data', function (data) {
    var m, idx;
    buffer += data;
    // remove terminal escape sequences
    buffer = buffer.replace(reEsc, '');
    // check for multiple progress indicators in the current buffer
    while (m = rePrg.exec(buffer)) {
        idx = m.index + m[0].length;
        console.log(m[1] + ' percent done!');
    }
    // check for the cmd.exe prompt
    if (m = reCwd.exec(buffer)) {
        if (++prompts === 2) {
            // command is done
            return term.kill();
        } else {
            // first prompt is before we started the actual 7-zip process
            if (idx === undefined) {
                // we didn't see a progress indicator, so make sure to truncate the
                // prompt from our buffer so that we don't accidentally detect the same
                // prompt twice
                buffer = buffer.substring(m.index + m[0].length);
                return;
            }
        }
    }
    // truncate the part of our buffer that we're done processing
    if (idx !== undefined)
        buffer = buffer.substring(idx);
});

term.write('"'
    + pathTo7zip
    + '" a -t7z -y "'
    + inputFile
    + '.7z" "'
    + inputFile
    + '"\r');
It should be noted that 7-zip does not always output 100% at the end. If the file compresses quickly, you may only see a single 57%, for example, so you will have to handle that however you want.
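One simple way to cope with that is to treat the child's exit as completion; a sketch against the spawn-based variant shown earlier:

// report completion on exit even if no 100% line was ever printed
proc.once('exit', function (code) {
    console.log('Progress = 100%');
    console.log('Complete (exit code ' + code + ')');
});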

Meteor/Node writeFile crashes server

I have the following code:
Meteor.methods({
    saveFile: function (blob, name, path, encoding) {
        var path = cleanPath(path), fs = __meteor_bootstrap__.require('fs'),
            name = cleanName(name || 'file'), encoding = encoding || 'binary',
            chroot = Meteor.chroot || 'public';
        // Clean up the path. Remove any initial and final '/' -we prefix them-,
        // any sort of attempt to go to the parent directory '..' and any empty directories in
        // between '/////' - which may happen after removing '..'
        path = chroot + (path ? '/' + path + '/' : '/');

        // TODO Add file existence checks, etc...
        fs.writeFile(path + name, blob, encoding, function (err) {
            if (err) {
                throw (new Meteor.Error(500, 'Failed to save file.', err));
            } else {
                console.log('The file ' + name + ' (' + encoding + ') was saved to ' + path);
            }
        });

        function cleanPath(str) {
            if (str) {
                return str.replace(/\.\./g, '').replace(/\/+/g, '').
                    replace(/^\/+/, '').replace(/\/+$/, '');
            }
        }

        function cleanName(str) {
            return str.replace(/\.\./g, '').replace(/\//g, '');
        }
    }
});
I took it from this project:
https://gist.github.com/dariocravero/3922137
The code works fine and it saves the file; however, it repeats the call several times, and each time it causes Meteor to reset (using Windows version 0.5.4). The F12 console fills with 503 errors, and the Meteor console loops over the startup code each time a 503 happens, repeating the console logs in the saveFile function.
Furthermore, in the target directory the image thumbnail keeps displaying, then shows as broken, then displays as a valid thumbnail again, as if the fs is writing it multiple times.
Here is the code that calls the function:
"click .savePhoto":function(e, template){
e.preventDefault();
var MAX_WIDTH = 400;
var MAX_HEIGHT = 300;
var id = e.srcElement.id;
var item = Session.get("employeeItem");
var file = template.find('input[name='+id+']').files[0];
// $(template).append("Loading...");
var dataURL = '/.bgimages/'+file.name;
Meteor.saveFile(file, file.name, "/.bgimages/", function(){
if(id=="goodPhoto"){
EmployeeCollection.update(item._id, { $set: { good_photo: dataURL }});
}else{
EmployeeCollection.update(item._id, { $set: { bad_photo: dataURL }});
}
// Update an image on the page with the data
$(template.find('img.'+id)).delay(1000).attr('src', dataURL);
});
},
What's causing the server to reset?
My guess would be that since Meteor has built-in automatic directory scanning to look for file changes (in order to relaunch the application with the newest code-base), the file you are creating is actually causing the server reset.
Meteor doesn't scan directories beginning with a dot (so-called "hidden" directories), such as .git for example, so you can use this behaviour to your advantage by setting the path of your files to a .directory of your own.
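For example (a sketch; the directory name is arbitrary):

// Files under a dot-directory are invisible to Meteor's watcher,
// so writing there won't trigger a server restart
fs.writeFile('.uploads/' + name, blob, encoding, function (err) {
    if (err) console.log('Failed to save file:', err);
});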
You should also consider using writeFileSync, insofar as Meteor methods are intended to run synchronously (inside node fibers), contrary to the usual node style of asynchronous calls. In this code it's no big deal, but, for example, you couldn't use any Meteor mechanics inside the writeFile callback:
asynchronousCall(function (error, result) {
    if (error) {
        // handle error
    }
    else {
        // do something with result
        Collection.update(id, result); // error! Meteor code must run inside a fiber
    }
});

var result = synchronousCall();
Collection.update(id, result); // good to go!
Of course there is a way to turn any asynchronous call into a synchronous one using fibers/future, but that's beyond the scope of this question: I recommend this EventedMind episode on node Futures to understand this specific area.
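For completeness, a sketch of that approach applied to the writeFile call from the method above (assuming fibers/future is available via Npm.require; the exact require mechanism varies with the Meteor version):

var Future = Npm.require('fibers/future');

var future = new Future();
fs.writeFile(path + name, blob, encoding, function (err) {
    // resolve the future from inside the async callback
    if (err) future.throw(err);
    else future.return();
});
future.wait(); // blocks only this fiber until the write completes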
