nodejs error: spawn ENOENT - node.js

I am very newbie with nodejs, I am trying to run it:
I obtain:
events.js:72
throw er; // Unhandled 'error' event
^
Error: spawn ENOENT
at errnoException (child_process.js:980:11)
at Process.ChildProcess._handle.onexit (child_process.js:771:34)
How can I solve this?
May be the problem is inside this code:
/**
*/
/*jslint vars: true, plusplus: true, devel: true, nomen: true, indent: 4, maxerr: 50 */
/*global define, require, module, process */
var childprocess = require("child_process"),
util = require("util"),
fs = require("fs");
var procWrapper = require("./processwrapper");
var spawn = childprocess.spawn;
module.exports = function () {
"use strict";
var pvs = procWrapper();
var o = {},
output = [],
readyString = "<PVSio>",
wordsIgnored = ["", "==>", readyString],
restarting = false,
sourceCode,
filename,
processReady = false,
pvsio,
workspaceDir = process.cwd() + "/public/";
/**
* get or set the workspace dir. this is the base directory of the pvs source code
* #param {String} dir
* #return {String} the current workspace directory
*/
o.workspaceDir = function (dir) {
if (dir) {util.log("OK");
dir = dir.substr(-1) !== "/" ? (dir + "/") : dir;
workspaceDir = dir;
util.log("OOO");
return o;
}
util.log("IIII");
return workspaceDir;
};
/**
* starts the pvs process with the given sourcefile
* #param {String} filename source file to load with pvsio
* #param {function({type:string, data:array})} callback function to call when any data is received in the stdout
* #param {function} callback to call when processis ready
*/
o.start = function (file, callback, processReadyCallback) {
filename = o.workspaceDir() + file;
function onDataReceived(data) {
var lines = data.split("\n").map(function (d) {
return d.trim();
});
var lastLine = lines[lines.length - 1];
//copy lines into the output list ignoring the exit string, the startoutput string '==>'
//and any blank lines
output = output.concat(lines.filter(function (d) {
return wordsIgnored.indexOf(d) < 0;
}));
if (processReady && lastLine.indexOf(readyString) > -1) {
var outString = output.join("").replace(/,/g, ", ").replace(/\s+\:\=/g, ":=").replace(/\:\=\s+/g, ":=");
//This is a hack to remove garbage collection messages from the output string before we send to the client
var croppedString = outString.substring(0, outString.indexOf("(#"));
outString = outString.substring(outString.indexOf("(#"));
util.log(outString);
callback({type: "pvsoutput", data: [outString]});
//clear the output
output = [];
} else if (lastLine.indexOf(readyString) > -1) {
//last line of the output is the ready string
processReadyCallback({type: "processReady", data: output});
processReady = true;
output = [];
}
}
function onProcessExited(code) {
processReady = false;
var msg = "pvsio process exited with code " + code;
util.log(msg);
callback({type: "processExited", data: msg, code: code});
}
pvs.start({processName: "pvsio", args: [filename],
onDataReceived: onDataReceived,
onProcessExited: onProcessExited});
util.log("pvsio process started with file " + filename + "; process working directory is :" + o.workspaceDir());
util.log("OK");
return o;
};
/**
* sends a command to the pvsio process. This method returns immediately. The result of the command
* will be by the 'on data' event of the process standard output stream
* #param {string} command the command to send to pvsio
*/
o.sendCommand = function (command) {
util.log("sending command " + command + " to process");
pvs.sendCommand(command);
return o;
};
/**
* gets the source code pvs io is executing
* #param {string} path the path the to file whose content is to be fetched
* #param {function({type:string, data, message:string})} callback callback to execute when sourcecode has been loaded
* #returns {this}
*/
o.readFile = function (path, callback) {
pvs.readFile(path, callback);
return o;
};
/**
* writes the file passed to the disk
* #param {fileName:string, fileContent: string} data Object representing the sourcecode to save
* #param {function ({type: string, data: {fileName: string}})} callback function to invoke when the file has been saved
*/
o.writeFile = function (path, data, callback) {
pvs.writeFile(path, data, callback);
return o;
};
/**
* closes the pvsio process
* #param {string} signal The signal to send to the kill process. Default is 'SIGTERM'
*/
o.close = function (signal) {
signal = signal || 'SIGTERM';
pvs.kill(signal);
return o;
};
return o;
};
After have started it , I go to localhost:8082 and it crashes!
That's all.
I'm sorry — it's possible that with this little information you can't help me.

I got the same error, after updating a lot of programs on my Windows 7 machine. Luckily I could remember that I removed git and installed it again using a newer version and different installation options. The difference was to select the 3rd option in "Adjusting PATH environment" settings ("Use Git and optional Unix tools from the Windows Command Prompt").
After updating node.js to a newer version (actually 0.10.31) the problem still existed. So I decided to remove git again, and et voilà, the socket server was working again. Now I will install git again with the default options, which do not modify the PATH environment variable.
So the problem comes from unix tools accessible through PATH variable such as installed by MinGW, Git or Cygwin (probably - not tested).

Read readme
Try to install this using npm install pvsio-web -g or (if your /usr/local/ directory is owned by root) sudo npm install pvsio-web -g
You should have PVS and PVSio installed on your computer
You should ensure that pvsio is available as a command on your path

If you don't want to install pvsio globally you can run it from node_modules/.bin
You should have to register to 'error' events of spawn, see: NodeJS: throw er; //Unhandled 'error' event (events.js:72) when using child_process spawn method

A very simple cause of this error: you're installing an application on a new server and the code is trying to run a linux CLI program that hasn't yet been installed on that particular server.
In my case, the code was spawning a process running inotify. So going to the linux command line and running
apt-get install inotify-tools
resolved the error.

Related

puppeteer files from lib folder not exist on server side install

On my windows machine I got a lot of files in my lib folder:
./node_modules/puppeteer/lib
However when I install puppeteer on my ubuntu server (Ubuntu 20.04 LTS) all of those files does not exist. There is only a cjs & esm folder.
I got a custom function in my script which access the lib files like this:
// Extract the Page class from puppeteer's internal lib/ folder.
// NOTE(review): this is an internal, unversioned path -- its layout differs
// between puppeteer releases and platform builds (newer versions ship only
// cjs/ and esm/ folders), which is why this require breaks after reinstalling
// on another machine.
const { Page } = require("puppeteer/lib/Page");
/**
 * Extends Page with a helper that, inside the page context, collects all
 * elements matching `selector`, keeps those whose innerText contains `text`,
 * and returns a handle to the last match.
 * @name elementContains
 * @param {String} selector CSS selector searched globally in the document
 * @param {String} text substring used to filter the matched elements by innerText
 * @returns {Promise} resolves to an elementHandle (a handle to undefined when nothing matches)
 */
Page.prototype.elementContains = function elementContains(...args) {
return this.evaluateHandle((selector, text) => {
// get all selectors for this specific selector
const elements = [...document.querySelectorAll(selector)];
// find element by text
const results = elements.filter(element => element.innerText.includes(text));
// get the last element because that's how querySelectorAll serializes the result
return results[results.length-1];
}, ...args);
};
/**
 * Replicate the .get function: gets a descendant element from this handle's
 * execution context by running querySelector relative to the handle's element.
 * @param {String} selector CSS selector resolved against this handle's element
 * @returns {Promise} resolves to a JSHandle for the first matching descendant
 *          (a handle to null when nothing matches)
 */
const { JSHandle } = require("puppeteer/lib/JSHandle");
// NOTE(review): `_context` is a private puppeteer field -- presumably the
// handle's ExecutionContext; verify it still exists in the installed version.
JSHandle.prototype.get = function get(selector) {
// get the context and evaluate inside
return this._context.evaluateHandle(
(element, selector) => {
return element.querySelector(selector);
},
// pass the JSHandle which is itself
this,
selector
);
};
Everything works fine on my windows machine. However on my ubuntu server it does not work because those files does not exist. I manually uploaded all files into this folder from my windows machine however I get then:
-TypeError: page.elementContains is not a function
Can you please help me understand why the lib folder doesn't contain the needed files on Linux?

How to include a static mapping file as part of a nodeJS module?

Assume that I have a nodeJS module whose index.js is as below
module.exports = function() {
var map = {},
CSV_FILE = "./input.csv",
_loadCSV = function() {
var fs = require('fs'),
parse = require('csv-parse'),
rawCSVData = fs.readFileSync(CSV_FILE).toString(),
i,
item;
parse(rawCSVData, {columns: true}, function(err, data) {
for (i = 0; i < data.length; i++) {
item = data[i];
map[item.key] = item.value;
}
});
},
_init = function() {
_loadCSV();
};
// Init
_init();
// Public
return {
/**
* getValue
*/
getValue: function(key) {
return map[key];
}
};
};
Now, everything works fine if I test locally. However, when I install this module in another project I get below error.
fs.js:549 return binding.open(pathModule._makeLong(path), stringToFlags(flags), mode);
^
Error: ENOENT: no such file or directory, open 'input.csv' at Error (native)
Is it possible to include a static mapping file as part of a nodeJS module that is used in module initialization?
Your problem is this line CSV_FILE = "./input.csv". It works locally because the script you're executing (index.js) is in the same directory as the input.csv-file. However, when you install it as a dependency, the input.csv-file is actually somewhere in ./node_modules/your-module/input.csv, hence your new index.js can't see any ./input.csv-file since it's not located in the same directory as the calling script.
There are two ways to solve this, the first one being the smartest in my opinion.
Do not distribute the input.csv-file. This is a very bad approach to building modules, and you should rather change your code so that your module accepts a path to a .csv-file that it loads. However your module may need static data, but in those cases it's smarter to just convert it to a JavaScript Object and include it directly.
Simply change one line of code,
from CSV_FILE = "./input.csv"
to CSV_FILE = __dirname + "/input.csv"
See documentation for __dirname

How to create application level variable in node.js

In my project I have running more than 100 cron jon using npm cron. My problem is I need to stop any cron job at run time.
my code is
in app.js file
// Relative modules must be required with an explicit "./" prefix:
// require('cronJobHandle.js') is resolved as a package inside node_modules
// and fails with "Cannot find module".
var cronJobRunner = require('./cronJobHandle.js');
global.jobManager = {};
// NOTE(review): "service 1" (with a space) matches none of the names in
// cronJobHandle's serviceList ('service1', ...) -- confirm the intended key.
cronJobRunner.startServiceFromCronJobManager("service 1");
in cronJobHandle.js
var CronJob = require('cron').CronJob;
module.exports = {
startServiceFromCronJobManager: function (scheduleServiceName) {
var serviceList =['service1','service2','service3','service4']
serviceList.forEach(function(service){
var job = new CronJob('* * * * * *', function(){
console.log("Service running");
}, function () {
// This function is executed when the job stops
},
true /* Start the job right now */,
timeZone /* Time zone of this job. */
);
global.jobManager.service = job;
});
},
stopServiceFromCronJobManager: function (scheduleServiceName) {
console.log(global.jobManager);
global.jobManager[scheduleServiceName].stop();
}
};
router.js
// Local module: needs the "./" prefix to resolve as a file, not a package.
var cronJobRunner = require('./cronJobHandle.js');
// NOTE(review): Express's app.route(path) takes no handler argument (it
// returns a Route for chaining), so the original handler was never
// registered -- and its call was left syntactically unterminated (missing
// closing parenthesis). Register a verb handler instead.
app.get('/stopservice', function (req, res) {
    cronJobRunner.stopServiceFromCronJobManager("service1");
    // reply so the HTTP request does not hang
    res.send("service1 stopped");
});
When I call http://localhost:9999/stopservice
I am getting undefine in console.log(global.jobManager);
Please help me understand how to maintain the cron jobManager variable common to all server-side JS files.
It is global but there's two bugs in your code.
1) you're calling the property called "service" instead of the one whose name is in service.
Change
global.jobManager.service = job;
to
global.jobManager[service] = job;
2) you're pottentially using global.jobManager in cronJobHandle.js before to declare it in app.js
There's a better solution. you don't need and should use global. Just declare a standard variable in cronJobHandle.js instead (so that it isn't accessed by other modules):
var jobManager = {};
...
jobManager[service] = job;
...
jobManager[scheduleServiceName].stop();

Is 7zip stdout broken? Is there a way to capture the progress in nodejs? [Windows]

I am trying to get the stdout of 7zip when it processes files and get the percentage in nodeJs, but it doesn't behave as expected. 7zip doesn't output anything to stdout until the very end of the execution. Which is not very helpful.. especially when I have large files being compressed and no feedback is shown for a very long time.
The code I am using (simplified):
// 7zip test, place the 7z.exe in the same dir, if it's not on %PATH%
var cp = require('child_process');
var inputFile = process.argv[2]; if(inputFile==null) return;
var regProgress = /(\d{1,3})%\s*$/; //get the last percentage of the string, 3 digits
var proc = cp.spawn("7z.exe",["a","-t7z" ,"-y" ,inputFile + ".7z",inputFile]);
proc.stdout.setEncoding("utf8");
proc.stdout.on("data",function(data){
if(regProgress.test(data))
console.log("Progress = " + regProgress.exec(data)[1] + "%");
});
proc.once("exit",function(exit,sig){ console.log("Complete"); });
I have used the same code to get the percentage with WinRar successfully and I am beginning to think that 7zip might be buggy? Or I am doing it wrong? Can I forcefully read the stdout of a process with a timer perhaps?
The same code above, with the exception of the following line replaced, works as expected with WinRar.
var proc = cp.spawn("Rar.exe",["a","-s","-ma5","-o+",inputFile+".rar",inputFile]);
If anyone knows why this happens and if it is fixable, I would be grateful! :-)
p.s. I have tried 7za.exe, the command line version of 7zip, also the stable, beta and alpha versions, they all have the same issue
It is no longer needed to use a terminal emulator like pty.js, you can pass the -bsp1 to 7z to force to output the progress to stdout.
7-zip only outputs progress when stdout is a terminal.
To trick 7-zip, you need to npm install pty.js (requires Visual Studio or VS Express with Windows SDK) and then use code like:
var pty = require('pty');
var inputFile = process.argv[2],
pathTo7zip = 'c:\\Program Files\\7-Zip\\7z.exe';
if (inputFile == null)
return;
var term = pty.spawn(process.env.ComSpec, [], {
name: 'ansi',
cols: 200,
rows: 30,
cwd: process.env.HOME,
env: process.env
});
var rePrg = /(\d{1,3})%\r\n?/g,
reEsc = /\u001b\[\w{2}/g,
reCwd = new RegExp('^' + process.cwd().replace(/\\/g, '\\\\'), 'm');
prompts = 0,
buffer = '';
term.on('data', function(data) {
var m, idx;
buffer += data;
// remove terminal escape sequences
buffer = buffer.replace(reEsc, '');
// check for multiple progress indicators in the current buffer
while (m = rePrg.exec(buffer)) {
idx = m.index + m[0].length;
console.log(m[1] + ' percent done!');
}
// check for the cmd.exe prompt
if (m = reCwd.exec(buffer)) {
if (++prompts === 2) {
// command is done
return term.kill();
} else {
// first prompt is before we started the actual 7-zip process
if (idx === undefined) {
// we didn't see a progress indicator, so make sure to truncate the
// prompt from our buffer so that we don't accidentally detect the same
// prompt twice
buffer = buffer.substring(m.index + m[0].length);
return;
}
}
}
// truncate the part of our buffer that we're done processing
if (idx !== undefined)
buffer = buffer.substring(idx);
});
term.write('"'
+ pathTo7zip
+ '" a -t7z -y "'
+ inputFile
+ '.7z" "'
+ inputFile
+ '"\r');
It should be noted that 7-zip does not always output 100% at finish. If the file compresses quickly, you may just see only a single 57% for example, so you will have to handle that however you want.

How to gZip content that's passed through a piped readStream

Im currently working on a project that requires the content to be gZip-ed before it's sent back to the browser.
Im currently using a simple read stream and piping the data to the response of a request, but im not sure the best way to gZip content without blocking requests
The line that send the data is:
require('fs').createReadStream(self.staticPath + Request.url).pipe(Response);
See the following class is the static handler object:
(function () {
    /**
     * Static file handler: serves files from <cwd>/application/static,
     * choosing Content-Type from the file extension via ./contenttypes.
     */
    var StaticFeeder = function () {
        this.staticPath = process.cwd() + '/application/static';
        this.contentTypes = require('./contenttypes');
    };
    /**
     * Attempts to serve Request.url from the static directory.
     * @param {http.IncomingMessage} Request incoming request
     * @param {http.ServerResponse} Response response the file is piped into
     * @param {function(boolean)} callback invoked with true when the response
     *        was handled here, false when the main handler should take over
     * @returns {boolean|undefined} false for rejected URLs (root, traversal);
     *          otherwise undefined (result reported through `callback`)
     */
    StaticFeeder.prototype.handle = function (Request, Response, callback) {
        var self = this;
        if (Request.url == '/') {
            return false;
        }
        // NOTE(review): this only blocks literal '../' sequences; encoded
        // traversals (e.g. '%2e%2e/') are not caught -- consider resolving
        // with path.normalize and checking the prefix.
        if (Request.url.indexOf('../') > -1) {
            return false;
        }
        // path.exists() was removed from node core; fs.exists() is the
        // (deprecated but still working) drop-in with the same signature.
        require('fs').exists(this.staticPath + Request.url, function (isthere) {
            // If no file exists, pass back to the main handler and return
            if (isthere === false) {
                callback(false);
                return;
            }
            // Get the extension if possible
            var ext = require('path').extname(Request.url).replace('.', '');
            // Get the Content-Type, defaulting to a binary stream
            var ctype = self.contentTypes[ext] !== undefined ? self.contentTypes[ext] : 'application/octet-stream';
            // Send the Content-Type
            Response.setHeader('Content-Type', ctype);
            // Create a readable stream and send the file
            require('fs').createReadStream(self.staticPath + Request.url).pipe(Response);
            // Tell the main handler we have dealt with the response
            callback(true);
        });
    };
    module.exports = new StaticFeeder();
}());
Can anyone help me get around this problem, i haven't a clue on how to tell the piping to compress with gZip.
Thanks
Actually, I have a blog post on just this thing. http://dhruvbird.blogspot.com/2011/03/node-and-proxydecorator-pattern.html
You will need to:
npm install compress -g
Before using it though.
The basic idea revolves around using pipes to add functionality.
However, for your use-case, you would be better off putting node.js behind nginx to do all the gzip'ing since node.js is a single process (actually not), and the gzip routines would take up your process' CPU.
You can just pipe it through a compression stream:
var fs = require('fs');
var zlib = require('zlib');
// Stream the file through a gzip transform before handing it to the response,
// so compression happens chunk-by-chunk without buffering the whole file.
var source = fs.createReadStream(file);
var gzip = zlib.createGzip();
source.pipe(gzip).pipe(Response);
assumes the file is not compressed already and you have already set all the headers for the response.

Resources