Monitoring for files using chokidar - node.js

I am trying to set up a monitoring piece in my app, and I am using chokidar to do so.
I want the app to stay awake all the time, monitoring the directory for new files; once it sees a new file it should move it to another directory. I have used a batch file to move the files, because with fs.rename I keep getting EBUSY.
Here is my code. The problem is that the app does not stay awake: it starts and exits, and I don't see it waiting to check for files. What am I doing wrong here?
const exec = require('child_process').exec;
function movefile() {
  exec('movetostage.bat', function (err, stdout, stderr) {
    if (err) {
      console.log(err);
      return;
    }
    console.log(stdout);
  });
}
function monitordir(){
var watcher = chokidar.watch(DATA_PATH, {ignored: /^\./, persistent: true});
watcher
.on('add', function(event,path) {
if(event){
console.log(event,path);
movefile();
main();
}
})
}
I have added watcher.on('add'); I am guessing that is the way to check for new files, but the important part here is that the app does not keep monitoring, it just exits.
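For reference, a minimal persistent chokidar watcher usually looks like the sketch below; note that the 'add' handler receives only the file path (and optionally its stats), not an event name. DATA_PATH and movefile() are the names from the question; everything else is illustrative.
const chokidar = require('chokidar');

function monitordir() {
  // persistent: true (the default) keeps the process alive as long as the watcher is open
  const watcher = chokidar.watch(DATA_PATH, { ignored: /^\./, persistent: true });

  // The 'add' callback receives the path of the new file (and optionally its fs.Stats)
  watcher.on('add', function (path) {
    console.log('New file detected:', path);
    movefile();
  });
}

monitordir();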

Related

Cronjobs and Nodejs

I'm trying to create a log file for every day using cron jobs. Whenever I restart the Node server it creates a new log file for the same day; otherwise it keeps writing the log to the same file. Please help.
Take a minute to read this: https://www.npmjs.com/package/cron
And this: https://www.npmjs.com/package/moment
function startLogger() {
  var name = (require('moment')()).format('YYYY-MM-DD');
  var path = ('~/' + name);
  // ONLY if the path doesn't exist, create it (write that code yourself).
  // Write to the end of the existing or newly created file:
  (require('fs')).appendFile(path, 'Your log line or multiple lines', function (err) {
    if (err) throw err;
    console.log('Log saved successfully!');
  });
}
PS: I do not recommend using require() inline like this; I only did it because I was short on time, but I hope you get the idea and can write your own version.
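To actually rotate the file daily, the cron package suggested above can schedule the logger. A minimal sketch, assuming the cron package's CronJob API and a made-up .log file name:
var CronJob = require('cron').CronJob;
var fs = require('fs');
var moment = require('moment');

// Append a line to a file named after the current date, e.g. 2016-05-04.log
function startLogger(line) {
  var name = moment().format('YYYY-MM-DD') + '.log';
  fs.appendFile(name, line + '\n', function (err) {
    if (err) throw err;
    console.log('Log saved successfully!');
  });
}

// Run every day at midnight; fs.appendFile creates the new day's file automatically
var job = new CronJob('0 0 0 * * *', function () {
  startLogger('New day started');
});
job.start();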

Electron 4 Windows -> electron-builder -> auto-update: custom solution

I'm building an app for windows using Electron. To package and distribute it I'm using electron-builder. Electron-builder relies on many packages, and for auto-updates it uses Squirrel-windows.
I've been battling with auto-update on Windows for 3 days and in the end I've come up with a working solution that seems to give no problems.
I won't go into the details of what I've tried and failed at. Instead, I'll post the solution I've come up with.
I'm sharing it with you guys to see if you can point out any flaws that would make my system fail, or, if it truly is a solid solution, to help those who are struggling as I was. For the latter reason, I'm posting more code than strictly necessary, hoping it will help others.
The logic is as follows:
if the sub-folder fullupdate inside the path of the current executable does not exist (this will be clarified later), we connect to an online server and check whether there is an update by sending the current app version;
if there is no update, do nothing.
if there is an update, we instruct the server to return a JSON string that contains the URL from which we can download the .exe installer produced by electron-builder. NB: not the .nupkg (server code not provided :-)).
we download the file and save it in a sub-folder fullupdate of the local folder in which our app is currently installed. This should be "safe", as electron-builder installs the app in the current user's AppData folder, so we should not have permission issues.
at the end of the download, we create a new file update inside the folder fullupdate to mark that the download finished successfully. We could also rename the downloaded file, but I prefer this way.
next time the app opens:
if the folder fullupdate exists, we check whether the file update exists. If it does not exist, the download did not finish, so we delete the folder fullupdate and call the remote server again to start over.
else, if the file update exists, we launch the .exe file we have downloaded and return true. This prevents the app from opening the main window. The cool thing is that the updater will delete the whole old version of the app saved in AppData (while leaving local user data) and replace it with the new version. In this way we also get rid of the folder fullupdate.
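For context, a JSON response with the shape assumed by the code below (the field names come from thereIsUpdate/getNewUpdate; the URLs are placeholders) would look like:
// Example response from the update endpoint (assumed shape, placeholder URLs):
{
  "url": "https://downloads.mysite.com/MyApp-Setup-1.0.1.exe",
  "urlsetup": "https://downloads.mysite.com/MyApp-Setup-1.0.1.exe"
}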
Now the code:
// we want to run this only on windows
var handleStartupEvent = function() {
if (process.platform !== 'win32') {
return false;
}
/////////////////
// MANUAL UPDATER
/////////////////
var appFolder = 'app-' + appVersion;
var pathApp = path.dirname(process.execPath);
var pathUpdate = pathApp + '\\fullupdate';
var checkupdateurl = 'https://api.mysite.com/getjson/' + appVersion.split('.').join('-');
function checkIfDownloaded(){
if (!fs.existsSync(pathUpdate)) checkUpdate();
else return checkIfInstallLocal();
}
function checkIfInstallLocal(){
if(fileExists('fullupdate\\update')) return installLocal();
else {
deleteFolderRecursive(pathUpdate);
checkUpdate();
}
}
function installLocal(){
cp.exec('fullupdate\\Update.exe', function( error, stdout, stderr){
if ( error != null ) {
console.log(stderr);
}
});
return true;
}
// from http://www.geedew.com/remove-a-directory-that-is-not-empty-in-nodejs/
var deleteFolderRecursive = function(path) {
if( fs.existsSync(path) ) {
fs.readdirSync(path).forEach(function(file,index){
var curPath = path + "/" + file;
if(fs.lstatSync(curPath).isDirectory()) deleteFolderRecursive(curPath);
else fs.unlinkSync(curPath);
});
fs.rmdirSync(path);
}
};
// from http://stackoverflow.com/questions/4482686/check-synchronously-if-file-directory-exists-in-node-js
function fileExists(path) {
try {
return fs.statSync(path).isFile();
}
catch (e) {
if (e.code == 'ENOENT') { // no such file or directory. File really does not exist
return false;
}
throw e; // something else went wrong, we don't have rights, ...
}
}
function checkUpdate(){
https.get('https://api.mysite.com/getjson/' + app.getVersion().split('.').join('-'), (res) => {
res.setEncoding('utf8');
res.on('data', function(chunk) {
if(chunk) thereIsUpdate(chunk);
});
}).on('error', (e) => {
console.log(e);
});
}
function thereIsUpdate(chunk){
var data = JSON.parse(chunk);
if(data && data.url) getNewUpdate(data.urlsetup);
}
function getNewUpdate(url){
fs.mkdirSync(pathUpdate);
var file = fs.createWriteStream(pathUpdate + '/Update.exe');
var responseSent = false; // flag to make sure that response is sent only once.
var request = https.get(url, function(response) {
response.pipe(file);
file.on('finish', () =>{
file.close(() => {
if(responseSent) return;
responseSent = true;
});
fs.closeSync(fs.openSync(pathUpdate + '/update', 'w'));
});
});
}
if(checkIfDownloaded()) return true;
/////////////////////////
// SQUIRREL EVENTS HANDLER
//////////////////////////
// see http://stackoverflow.com/questions/30105150/handle-squirrels-event-on-an-electron-app
};
// Here we call the function before opening the window, so that we prevent the window from opening if we are updating or if a Squirrel event is being handled (see the SO question linked above)
if (handleStartupEvent()) {
return;
}

atomic write/read of a file in nodejs

My nodejs application is built around a "project file".
Several modules of this application (by "module" I mean a simple JavaScript file of my project) need to load, modify and save this file often, via streams (fs.createReadStream, fs.createWriteStream). Since those modules are executed independently from each other, sometimes triggered by websocket events (for instance), I need to make the save/load operations on the project file atomic.
That means handling the following scenario:
moduleA writes the project file
at the same time, and before moduleA has finished writing the file, moduleB wants to read it => ideally, it should wait for moduleA's write operation to finish (currently it reads a partially written file and detects an error) before actually reading the file
Is Node.js able to do this natively, or do I have to build a sort of atomic wrapper around my read/write stream system?
There is, to my knowledge, nothing built in. There are modules such as redis-lock, though, that implement a lock mechanism.
If you run on a single, non-clustered server, you could probably get by with a simple local lock.
This might give you an idea:
var Fs = require("fs"),
    LOCK = require("os").tmpdir() + '/foo-lock.';
function transLock(id, cb) {
Fs.open(LOCK + id, "wx", function(err, fd) {
if (err) {
// someone else has created the file
// or something went wrong
cb(err);
} else {
Fs.close(fd, function(err) {
// there should be no error here except weird stuff
// like EINTR which must be ignored on Linux
cb();
});
}
});
}
function transUnlock(id) {
Fs.unlink(LOCK + id, function(err) {
if (err) {
// something is wrong and nothing we can do except
// perhaps log something or do some background cleanup
}
});
}
function main() {
var id = "some-unique-name";
transLock(id, function(err) {
if (err)
console.log(err);
else {
// ... do your stuff ...
transUnlock(id);
}
});
}
main();
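Applied to the question's project file, each module would acquire the lock before touching the file and release it when done. A rough sketch using the transLock/transUnlock helpers above (project-file.json is just a placeholder name, and plain readFile/writeFile stand in for the streams):
var fs = require("fs");

var PROJECT_FILE = "project-file.json"; // placeholder for the shared project file

function updateProject(modify, done) {
  // Serialize access: only one caller at a time gets past transLock
  transLock("project-file", function (err) {
    if (err) return done(err); // lock already held (or something went wrong), retry later

    fs.readFile(PROJECT_FILE, "utf8", function (err, data) {
      if (err) { transUnlock("project-file"); return done(err); }

      var project = modify(JSON.parse(data));

      fs.writeFile(PROJECT_FILE, JSON.stringify(project), function (err) {
        transUnlock("project-file"); // always release the lock
        done(err);
      });
    });
  });
}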

Can a node.js server know if a server file is created?

Which is the most elegant way or technology to let a Node.js server know when a file is created on the server?
The idea is: a new image has been created (from a webcam or so) -> dispatch an event!
UPDATE: The name of the new file in the directory is not known a priori and the file is generated by an external software.
You should take a look at fs.watch(). It allows you to "watch" a file or directory and receive events when things change.
Note: The documentation states that fs.watch is not consistent across platforms, so you should take that into account before using it.
fs.watch(fileOrDirectoryPath, function(event, filename) {
// Something changed with filename, trigger event appropriately
});
Also something to be aware of from the docs:
Providing filename argument in the callback is not supported on every
platform (currently it's only supported on Linux and Windows). Even on
supported platforms filename is not always guaranteed to be provided.
Therefore, don't assume that filename argument is always provided in
the callback, and have some fallback logic if it is null.
If filename is not available on your platform and you're watching a directory you may need to do something where you initially read the directory and cache the list of files in it. Then, when you get an event from fs.watch, read the directory again and compare it to the cached list of files to see what was added (if anything).
Update 1: There's a good module called watch, on github, which makes it easy to watch a directory for new files.
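A rough sketch of what using that module might look like, assuming its createMonitor API (someDirectoryPath is a placeholder):
var watch = require('watch');

watch.createMonitor(someDirectoryPath, function (monitor) {
  // 'created' fires with the path of each newly created file
  monitor.on('created', function (f, stat) {
    console.log('New file:', f);
  });
});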
Update 2: I threw together an example of how to use fs.watch to get notified when new files are added to a directory. I think the module I linked to above is probably the better way to go, but I thought it would be nice to have a basic example of how it might work if you were to do it yourself.
Note: This is a fairly simplistic example just to show how it could work in general. It could almost certainly be done more efficiently, and it's far from thoroughly tested.
function watchForNewFiles(directory, callback) {
// Get a list of all the files in the directory
fs.readdir(directory, function(err, files) {
if (err) {
callback(err);
} else {
var originalFiles = files;
// Start watching the directory for new events
var watcher = fs.watch(directory, function(event, filename) {
// Get the updated list of all the files in the directory
fs.readdir(directory, function(err, files) {
if (err) {
callback(err);
} else {
// Filter out any files we already knew about
var newFiles = files.filter(function(f) {
return (originalFiles.indexOf(f) < 0);
});
// Reset our list of "original" files
originalFiles = files;
// If there are new files detected, call the callback
if (newFiles.length) {
callback(null, newFiles);
}
}
})
});
}
});
}
Then, to watch a directory you'd call it with:
watchForNewFiles(someDirectoryPath, function(err, files) {
if (err) {
// handle error
} else {
// handle any newly added files
// "files" is an array of filenames that have been added to the directory
}
});
I came up with my own solution using this code here:
var fs = require('fs');
var intID = setInterval(check,1000);
function check() {
fs.exists('file.txt', function check(exists) {
if (exists) {
console.log("Created!");
clearInterval(intID);
}
});
}
You could add a parameter to the check function for the name of the file and use it in the path.
I did some tests on fs.watch() and it does not work if the file does not exist yet. fs.watch() has multiple issues anyway and I would never suggest using it... It does work for checking whether the file was deleted, though...

node.js file system problems

I keep banging my head against the wall because of tons of different errors. This is the code I am trying to use:
fs.readFile("balance.txt", function (err, data) //At the beginning of the script (checked, it works)
{
if (err) throw err;
balance = JSON.parse(data);
});
fs.readFile("pick.txt", function (err, data)
{
if (err) throw err;
pick = JSON.parse(data);
});
/*....
.... balance and pick are modified
....*/
if (shutdown)
{
fs.writeFile("balance2.txt", JSON.stringify(balance));
fs.writeFile("pick2.txt", JSON.stringify(pick));
process.exit(0);
}
At the end of the script, the files have not been modified in the slightest. I then found out on this site that the files were being opened twice simultaneously, or something like that, so I tried this:
var balance, pick;
var stream = fs.createReadStream("balance.txt");
stream.on("readable", function()
{
balance = JSON.parse(stream.read());
});
var stream2 = fs.createReadStream("pick.txt");
stream2.on("readable", function()
{
pick = JSON.parse(stream2.read());
});
/****
****/
fs.unlink("pick.txt");
fs.unlink("balance.txt");
var stream = fs.createWriteStream("balance.txt", {flags: 'w'});
var stream2 = fs.createWriteStream("pick.txt", {flags: 'w'});
stream.write(JSON.stringify(balance));
stream2.write(JSON.stringify(pick));
process.exit(0);
But this time both files are empty... I know I should catch errors, but I just don't see where the problem is. I don't mind storing the two objects in the same file if that helps. Besides that, I had never written any JavaScript before yesterday, so please give me a simple explanation if you know what failed here.
What I think you want to do is use readFileSync instead of readFile to read your files, since you need them to be read before doing anything else in your program (http://nodejs.org/api/fs.html#fs_fs_readfilesync_filename_options).
This will make sure you have read both files before you execute any of the rest of your code.
Make your code do this:
try
{
balance = JSON.parse(fs.readFileSync("balance.txt"));
pick = JSON.parse(fs.readFileSync("pick.txt"));
}
catch(err)
{ throw err; }
I think you will get the functionality you are looking for by doing this.
Note that you will not be able to check for an error in the same way you can with readFile. Instead you will need to wrap each call in a try/catch or use existsSync before each operation to make sure you aren't trying to read a file that doesn't exist.
How to capture no file for fs.readFileSync()?
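For example, a small sketch of handling a missing file explicitly (the fallback default is made up for illustration):
var fs = require('fs');

function readJsonOrDefault(file, fallback) {
  try {
    return JSON.parse(fs.readFileSync(file));
  } catch (err) {
    if (err.code === 'ENOENT') return fallback; // file does not exist yet
    throw err; // anything else (permissions, malformed JSON, ...) is a real error
  }
}

var balance = readJsonOrDefault("balance.txt", {});
var pick = readJsonOrDefault("pick.txt", {});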
Furthermore, you have the same problem with the writes. You are kicking off async writes and then immediately calling process.exit(0). A better way to do this would be either to write them asynchronously in sequence and then exit, or to write them synchronously in sequence and then exit.
Async option:
if (shutdown)
{
fs.writeFile("balance2.txt", JSON.stringify(balance), function(err){
fs.writeFile("pick2.txt", JSON.stringify(pick), function(err){
process.exit(0);
});
});
}
Sync option:
if (shutdown)
{
fs.writeFileSync("balance2.txt", JSON.stringify(balance));
fs.writeFileSync("pick2.txt", JSON.stringify(pick));
process.exit(0);
}
