Wait until file is created to read it in node.js

I am trying to use file creation and deletion as a method of data transfer (not the best way, I know) between Python and Node.js. The Python side of the program works fine, as I am quite familiar with Python 3, but I can't get the Node.js script to work.
I've tried various methods of detecting when a file is created, mainly with the use of try {} catch {}, but none of them have worked.
const fs = require('fs');

function fufillRequest(data) {
  fs.writeFile('Response.txt', data)
}

while (true) {
  try {
    fs.readFile('Request.txt', function(err, data) {
      console.log(data);
    });
  } catch {
  }
}
The program is supposed to see that the file has been created, read its contents, delete it, and then create and write to a response file.

jfriend00's solution is correct. However, it never clears the timeout, which may cause an issue. If you want a recurring check with simpler timer handling, you can use setInterval.
Sample:
const checkTime = 1000;
const fs = require('fs');
const messageFile = 'test.js';

const timerId = setInterval(() => {
  const isExists = fs.existsSync(messageFile);
  if (isExists) {
    // do something here
    clearInterval(timerId);
  }
}, checkTime);
You can also spawn your Python program from Node directly, so there is no need to write another watcher script:
const spawn = require("child_process").spawn;
const proc = spawn('python',["./watch.py"]);
proc.stdout.on('data', (data) => console.log(data.toString()))
proc.stderr.on('data', (data) => console.log(data.toString()))

You can either use a recurring timer or fs.watch() to monitor when the file appears.
Here's what it would look like with a recurring timer:
const checkTime = 1000;
const fs = require('fs');
function check() {
  setTimeout(() => {
    fs.readFile('Request.txt', 'utf8', function(err, data) {
      if (err) {
        // got error reading the file, call check() again
        check();
      } else {
        // we have the file contents here, so do something with it
        // can delete the source file too
      }
    });
  }, checkTime);
}
check();
Note: Whatever process is creating this file should probably use an exclusive access mode when writing so that you don't create a race condition where it starts reading before the other process is done writing.
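For comparison, here's a minimal sketch of the fs.watch() variant. Note that fs.watch() is platform-dependent and the filename argument isn't guaranteed to be provided on every OS, so treat this as a starting point rather than a drop-in solution:
const fs = require('fs');

// watch the current directory and react when Request.txt shows up
const watcher = fs.watch('.', (eventType, filename) => {
  if (filename === 'Request.txt') {
    fs.readFile('Request.txt', 'utf8', (err, data) => {
      if (err) return;   // the file may not be fully written yet; wait for the next event
      watcher.close();   // stop watching once we have the contents
      // do something with data, delete Request.txt, write Response.txt
    });
  }
});
The same note about exclusive access while writing applies here as well.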

Related

Why does ending a write stream clear data that was previously written to that file in NodeJS

I was trying to read the content of one file and write it to another using NodeJS. Everything works if I do not add fileStream.end(). Below is the code I have written:
const fs = require('fs');
const readLine = require('readline');

//read file
const myInterface = readLine.createInterface({ input: fs.createReadStream('./input/file.txt'), output: fs.createWriteStream('output.txt') });

//print to output
const fileStream = fs.createWriteStream('output1.txt');

//function to copy
let copyData = (line) => {
  //console.log(line);
  fileStream.write(`copied: ${line}\n`);
}

//copy file
myInterface.on('line', copyData);
fileStream.end();

//print to console
fs.readFile('output1.txt', 'utf-8', (err, data) => {
  if (err)
    console.log(`ERROR: ${err}`)
  else
    console.log(`readFile: ${data}`)
})
Thanks in advance!
You're calling fileStream.end() synchronously before any of the reading and writing has completed.
When you do this:
myInterface.on('line',copyData);
you're just setting up an event handler and sometime IN THE FUTURE, the function copyData will be called with each line from the file. But, before any of that even happens, you call:
fileStream.end()
which shuts down the writestream before anything has been written to it.
Similarly, you're also calling fs.readFile() before the reading/writing has completed.
This is all an event driven system. When you set up asynchronous operations like you have here and you want to do something upon completion of the operation, then you can't just call that code synchronously like you are here because the asynchronous events won't have finished yet. Instead, you need to register for completion events and trigger your "after" work on the completion events.
In this case, you can listen for the close event on the input stream.
const fs = require('fs');
const readLine = require('readline');

//read file
const myInterface = readLine.createInterface({ input: fs.createReadStream('./input/file.txt') });

//print to output
const fileStream = fs.createWriteStream('output1.txt');

//function to copy
let copyData = (line) => {
  //console.log(line);
  fileStream.write(`copied: ${line}\n`);
}

//copy file
myInterface.on('line', copyData);

// see when the read stream is done
myInterface.on('close', () => {
  fileStream.end();
  //print to console
  fs.readFile('output1.txt', 'utf-8', (err, data) => {
    if (err)
      console.log(`ERROR: ${err}`)
    else
      console.log(`readFile: ${data}`)
  })
});

// listen for errors too
myInterface.on('error', err => {
  console.log(err);
});
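As an aside, on modern Node versions where readline interfaces support async iteration, the same copy can be written with async/await so the ordering is explicit. This is a sketch of an alternative, not part of the answer above:
const fs = require('fs');
const readLine = require('readline');

async function copyFile() {
  const myInterface = readLine.createInterface({ input: fs.createReadStream('./input/file.txt') });
  const fileStream = fs.createWriteStream('output1.txt');
  // the loop only finishes once the read stream has been fully consumed
  for await (const line of myInterface) {
    fileStream.write(`copied: ${line}\n`);
  }
  // wait for the write stream to flush before resolving
  await new Promise((resolve) => fileStream.end(resolve));
}

copyFile().then(() => {
  fs.readFile('output1.txt', 'utf-8', (err, data) => {
    if (err) console.log(`ERROR: ${err}`);
    else console.log(`readFile: ${data}`);
  });
});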

NodeJS app with PM2 and a daily-changing JSON file

I currently have a NodeJS app running under PM2. The app displays data from a JSON file. The data is retrieved daily at 12 o'clock via a curl command run from system cron. For the data to load in the frontend I always have to execute a "pm2 reload ...", which is quite annoying.
Anybody have an idea how I can solve this in the most elegant way? I have not worked with PM2 before, hence the naive question :)
Best
Not really PM2 related. When the JSON file changes you have to re-read it into memory by importing/requiring it again. Use fs.watch to watch for file changes, or add a built-in timer to re-read the file after midnight.
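For example, a minimal sketch of the fs.watch approach, where the file name and cache variable are hypothetical:
const fs = require('fs');
const path = require('path');

const dataPath = path.join(__dirname, 'data.json'); // hypothetical path to the file cron rewrites
let cachedData = JSON.parse(fs.readFileSync(dataPath, 'utf8'));

// re-read the file into memory whenever it changes on disk
fs.watch(dataPath, () => {
  fs.readFile(dataPath, 'utf8', (err, raw) => {
    if (err) return;
    try {
      cachedData = JSON.parse(raw);
    } catch (e) {
      // the file may be mid-write; keep the previous data
    }
  });
});
Your route handlers then serve cachedData instead of requiring the JSON file, so no "pm2 reload" is needed.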
Sorry for the late answer. I have two possible solutions; I chose the second one myself because it does not require external cron jobs.
Option-1:
At midnight a curl call on the host fetches the API and writes the result to a JSON file. I read the file with fs.promises and watch for file changes with "chokidar".
const fs = require('fs').promises,
  path = require('path'),
  chokidar = require('chokidar'),
  filePath = path.join(__dirname, '../<PATH_TO_FILE>/<FILE>.json'),
  watcher = chokidar.watch(filePath, { persistent: true });

watcher.on('change', () => {
  getJSONData();
});

const getJSONData = async () => {
  try {
    let getFileData = await fs.readFile(filePath, { encoding: 'utf-8' });
    return JSON.parse(getFileData);
  } catch (error) {
    console.log('Error while reading the JSON file.');
  }
};
Option-2:
I call the API inside a function scheduled with the node module "node-schedule".
require('dotenv').config();
const { <API_SERVICE_MODULE> } = require('<API_SERVICE_MODULE>'),
  schedule = require('node-schedule'),
  API_KEY = process.env.API_KEY,
  LABEL = process.env.LABEL;

const API = new <API_SERVICE_MODULE>({
  projectId: API_KEY,
});

const getDataFromAPI = async () => {
  try {
    const data = await API.<METADATA>(LABEL);
    return data;
  } catch (err) {
    return 'Error with fetching data from API';
  }
};

schedule.scheduleJob('*/10 * * * *', () => {
  getDataFromAPI();
});
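Note that in the snippet above the return value of getDataFromAPI() is discarded; in the real app you would keep the result in a variable that your routes serve. A hypothetical variant:
let cachedData = null; // hypothetical in-memory cache served by the app's routes

schedule.scheduleJob('0 0 * * *', async () => { // hypothetical: run at midnight instead of every 10 minutes
  cachedData = await getDataFromAPI();
});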
FYI: Both approaches work with PM2.

Why does fs.watch only trigger when I open the file being watched?

I'm currently trying to implement a live file feed that writes data to the connected client in the browser. This works perfectly fine if I'm editing the document directly; updates are sent immediately.
Instead of writing to the file manually, I created another process to handle that step. The issue I'm having is that when my process writes to this file, the changes are not detected. Unless I explicitly open the file, the changes made are not detected.
I also know that fs.watch is inconsistent, but what would be the difference between manual editing and automation?
// function to start process and check for changes in file
const start = function() {
  fs.open(file, 'r', (err, fd) => {
    if (err) {
      return setTimeout(start, 1000);
    }
    fs.watch(file, (event, filename) => {
      if (event === "change") {
        console.log('change detected');
        // function that sends client messages
        sendMessage(fd);
      }
    });
  });
}
Here is the code that automates the process of writing to the file:
const fs = require('fs');
const file = 'file.txt';
const writeStream = fs.createWriteStream(file, {
  flags: "a"
});

const cleanBuffer = function(len) {
  let buf = Buffer.alloc(len);
  buf.fill('\0');
  return buf;
}

const check = function() {
  let newData = `data being written`;
  const buffer = cleanBuffer(newData.length);
  buffer.write(newData, 'ascii');
  writeStream.write(buffer);
  setTimeout(check, 10000);
}
I tried viewing the file from the file explorer, and whenever I access the folder this file is contained in, the change is detected... is this actually watching the file?
I did some research and it looks as though with Windows this is working as expected, because:
On Windows systems, this feature depends on ReadDirectoryChangesW.
I had to change the function to use fs.watchFile instead, which is working for me, although it is recommended to use fs.watch.
More can be read here: https://nodejs.org/docs/latest-v11.x/api/fs.html#fs_availability
The code now reflects as:
// function to start process and check for changes in tweets file
const start = function() {
  fs.open(file, 'r', (err, fd) => {
    if (err) {
      return setTimeout(start, 1000);
    }
    fs.watchFile(file, (curr, prev) => {
      // compare the timestamps numerically (mtime is a Date object)
      if (curr.mtimeMs !== prev.mtimeMs) {
        console.log('change detected');
        sendMessage(fd);
      }
    });
  });
}
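If the default polling interval of fs.watchFile (around 5 seconds) is too slow for a live feed, it also accepts an options object with an interval in milliseconds. A small sketch, assuming the same file variable:
// poll every second instead of the default interval
fs.watchFile(file, { interval: 1000 }, (curr, prev) => {
  if (curr.mtimeMs !== prev.mtimeMs) {
    console.log('change detected');
  }
});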

Clients for a REPL server do not display the server's input and output properly

I wrote a small REPL server and client for debugging the application I'm writing. When I input enough text to make it wrap to the next line in the terminal and press backspace, though, it redisplays all the lines, not just the one that was updated. I also can't move back to previous lines once I no longer have any characters to delete. There has to be a way to do this if node is reasonably sane, but I'm at a complete loss as to what to change. How can I make this work?
Edit: tab complete does not display until after hitting enter. Lines from history don't display in process.stdin at all, but will still be evaluated after sending them.
repl-client.js
'use strict';
const net = require('net');
const socket = net.connect(8001, '0.0.0.0');
process.stdin.pipe(socket);
socket.pipe(process.stdout);
process.stdin.on('finish', () => {
  socket.destroy('');
});
repl-server.js
import * as babel from 'babel-core';
import net from 'net';
import repl from 'repl';
import vm from 'vm';
function extendContext(context) {
  // add vars I need to context
  return context;
}

function babelEval(input, filename) {
  let sanitizedInput = (input.startsWith('(') && input.endsWith(')'))
    ? input.slice(1, -1)
    : input;
  let code = babel.transform(sanitizedInput).code;
  return vm.runInThisContext(code, { filename });
}

export default function createREPLServer(port, hostname) {
  return net.createServer((socket) => {
    const _repl = repl.start({
      prompt: 'test> ',
      input: socket,
      output: socket,
      terminal: true,
      useColors: true,
      useGlobal: true,
      replMode: repl.REPL_MODE_STRICT,
      eval(input, context, filename, cb) {
        let err;
        let code;
        try {
          code = babelEval(input.trim(), filename);
        } catch (e) {
          err = e;
        }
        cb(err, code);
      }
    });
    _repl.on('reset', (context) => extendContext(context));
    _repl.on('exit', () => socket.end());
    extendContext(_repl.context);
  }).listen(8001, '0.0.0.0');
}
I managed to fix this in part with the help of 小太郎 and by adding a readable event handler to repl-server.js:
socket.on('readable', () => {
  socket.read();
});

NodeJS: are modules loaded in order?

I use the module tough-cookie-filestore which saves cookies to a local file. When setting request to use this cookie jar, it requires that the file already exists on disk. As I use this cookie jar in multiple modules, I want to avoid a big block of code at the top of my modules which checks if the cookie file exists and if not creates it, and so I made a module initcookie.js that does this.
My question is, is this a safe and good way to do this? initcookie.init() makes sure that the file exists, but can I be sure that it is run before new FileCookieStore(config.cookiePath) is executed?
var initcookie = require('../initcookie.js').init()
var config = require('../config.js')
var FileCookieStore = require('tough-cookie-filestore')
var request = require('request')
var j = request.jar(new FileCookieStore(config.cookiePath))
request = request.defaults({ jar: j })
Where initcookie.js is:
var config = require('./config.js')
var fs = require('fs')

// Initialize cookie file which stores the login info
exports.init = function () {
  try {
    fs.openSync(config.cookiePath, 'r')
  } catch (e) {
    if (e.code === 'ENOENT') {
      // File not found, so make one
      fs.writeFileSync(config.cookiePath, '', { flags: 'wx' }, function (err) {
        if (err) { throw (err) }
      })
    } else {
      throw (e)
    }
  }
}
This way will work, but it isn't the best way to do it. fs.writeFileSync and fs.openSync will make sure that your code executes synchronously, but it would be better to go async so you aren't holding up the thread. You could write:
var config = require('./config.js')
var fs = require('fs')

// Initialize cookie file which stores the login info
exports.init = function () {
  return new Promise(function (resolve, reject) {
    try {
      fs.openSync(config.cookiePath, 'r')
      resolve()
    } catch (e) {
      if (e.code === 'ENOENT') {
        // File not found, so make one
        fs.writeFile(config.cookiePath, '', { flags: 'wx' }, function (err) {
          if (err) { reject(err) }
          else { resolve() }
        })
      } else {
        reject(e)
      }
    }
  })
}
This way you can use Promises in your other files and be sure cookiePath is created without holding up the thread.
The only issue I could see you running into would be if async code leads you to run this function several times before it completes, creating a race condition. To avoid that, I would call this function only once at the beginning of the program and put everything else in the success handler of that promise.
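For example, a hypothetical consumer module could look something like this, with everything that needs the cookie jar placed inside the then() handler:
var initcookie = require('../initcookie.js')
var config = require('../config.js')
var FileCookieStore = require('tough-cookie-filestore')
var request = require('request')

initcookie.init().then(function () {
  // the cookie file is guaranteed to exist here
  var j = request.jar(new FileCookieStore(config.cookiePath))
  request = request.defaults({ jar: j })
  // ... everything that uses the cookie jar goes here
}).catch(function (err) {
  throw err
})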
