I currently have a Node.js app running under PM2. The app displays data from a JSON file. The data is retrieved daily at 12 o'clock via a curl command run by system cron. For the new data to show up in the frontend, I always have to execute a "pm2 reload ...", which is quite annoying.
Does anybody have an idea how I can solve this problem elegantly? I haven't worked with PM2 before, hence my possibly naive question :)
Best
This is not really PM2 related. When the JSON file changes, you have to re-read it into memory. Use fs.watch to watch for file changes, or add a built-in timer that re-reads the file after midnight.
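For illustration, here is a minimal sketch of that idea using plain fs.watch; the data.json filename and location are assumptions for the example, not part of the original setup:
const fs = require('fs');
const path = require('path');

const filePath = path.join(__dirname, 'data.json'); // hypothetical location
let data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));

fs.watch(filePath, (eventType) => {
    if (eventType !== 'change') return;
    try {
        // re-read the file into memory so the app serves fresh data
        data = JSON.parse(fs.readFileSync(filePath, 'utf-8'));
        console.log('data.json reloaded');
    } catch (err) {
        // the writer may still be mid-write; keep the previous data
        console.error('Could not reload data.json:', err.message);
    }
});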
Sorry for the late reply. I have two possible solutions; I chose option 2 myself because it doesn't need external cron jobs.
Option-1:
At midnight, a curl command on the host calls an API and writes the result to a JSON file. I read the file with fs.promises and watch for file changes with "chokidar".
const fs = require('fs').promises;
const path = require('path');
const chokidar = require('chokidar');

const filePath = path.join(__dirname, '../<PATH_TO_FILE>/<FILE>.json');

const getJSONData = async () => {
  try {
    const fileData = await fs.readFile(filePath, { encoding: 'utf-8' });
    return JSON.parse(fileData);
  } catch (error) {
    console.log('Error while reading the JSON file.');
  }
};

// re-read the data whenever chokidar reports a change
const watcher = chokidar.watch(filePath, { persistent: true });
watcher.on('change', () => {
  getJSONData();
});
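As a follow-up usage sketch building on the snippet above: the watched data can be cached in memory and served to the frontend, so no pm2 reload is ever needed. The Express route and port here are my own assumptions:
const express = require('express');

let cache = {};
getJSONData().then(data => { cache = data; }); // initial load
watcher.on('change', async () => {
  cache = await getJSONData(); // refresh the in-memory copy
});

const app = express();
app.get('/data', (req, res) => res.json(cache)); // the frontend reads from here
app.listen(3000);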
Option-2:
I call the API inside a function scheduled with the node module "node-schedule".
require('dotenv').config();
const { <API_SERVICE_MODULE> } = require('<API_SERVICE_MODULE>');
const schedule = require('node-schedule');

const API_KEY = process.env.API_KEY;
const LABEL = process.env.LABEL;

const API = new <API_SERVICE_MODULE>({
  projectId: API_KEY,
});

const getDataFromAPI = async () => {
  try {
    const data = await API.<METADATA>(LABEL);
    return data;
  } catch (err) {
    return 'Error while fetching data from API';
  }
};

// every 10 minutes; use '0 0 * * *' instead to run once at midnight
schedule.scheduleJob('*/10 * * * *', () => {
  getDataFromAPI();
});
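To complete the picture, here is a hedged sketch of how the scheduled job could persist its result so the rest of the app picks it up, with no external cron job involved. The fetchData stub and the output path are placeholders, not the original code:
const schedule = require('node-schedule');
const fs = require('fs').promises;
const path = require('path');

const fetchData = async () => ({ updatedAt: new Date().toISOString() }); // stub for the real API call
const outFile = path.join(__dirname, 'data.json'); // hypothetical location

// run once a day at midnight and persist the result
schedule.scheduleJob('0 0 * * *', async () => {
  try {
    const data = await fetchData();
    await fs.writeFile(outFile, JSON.stringify(data, null, 2));
  } catch (err) {
    console.error('Scheduled fetch failed:', err);
  }
});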
FYI: both approaches work with PM2.
I'm building multiple Next.js apps for different subdomains of the same site. We have a REST API backend with an app-info endpoint that gives me some important info about SEO and a few other things, so I have to render this data on the server side. The thing is, this data won't change often (it is updated by an admin if needed), so there is no need to use getServerSideProps on every page or App.getInitialProps. I just need to call this endpoint every hour and create/update a JSON file based on the data. How can I achieve such behavior? By the way, I am not deploying these sites on Vercel; we use our own servers.
I also tried this tutorial, but it didn't work properly. It did run the worker.js file successfully, but I couldn't open the website in a browser. After adding the webpack part to next.config.js, the website stopped working.
Note: I need access to env variables, so I can't use this solution. And since we have multiple subdomains, each with a test and a production server, it would be very hard to use cron-job.org or similar services. I want to be able to have the cron job run alongside Next.js with a single command (npm start, for example, to run both the job and the Next.js server).
After playing around, I came up with a script which might not look ideal, but it is the closest I could get to what I was trying to achieve:
// cron-build.js
const { exec, spawn } = require("child_process")
const fs = require("fs-extra")
require("dotenv").config({
path: "./.env.production.local"
})
const isEqual = require("lodash.isequal")
const cron = require("node-cron")
const execPromise = (command) =>
new Promise((resolve) => {
exec(command, (err, stdout, stderr) => {
resolve({ err, stdout, stderr })
})
})
/**
 * Fetches app info, then starts the build procedure if appInfo.json does not exist or differs from the server's version.
 */
const build = async () => {
try {
console.log("fetching app info\n")
    const appInfo = await fetchAppInfo() // fetchAppInfo() is assumed to be defined elsewhere in this script
let currentAppInfo = undefined
    try {
      currentAppInfo = await fs.readJSON("./src/appInfo.json")
    } catch (e) {
      // appInfo.json does not exist yet; the build below will create it
    }
if (isEqual(appInfo, currentAppInfo)) {
console.log("No change found in app info\n")
rest(true)
return
}
    await fs.writeJson("./src/appInfo.json", appInfo)
} catch (e) {
console.log(e)
throw e
}
console.log("Change detected in app info. Rebuilding application...\n")
const buildProcess = spawn("npm", ["run", "build"])
buildProcess.addListener("error", (data) => console.log("error", data))
buildProcess.on("exit", (exitCode) => {
if (exitCode === 0) {
console.log("Successful build. Restarting server...\n")
rest()
} else {
console.error(
"Build failed. Run `npm run build` in your terminal to see the logs"
)
}
})
buildProcess.on("message", (data) => console.log("message", data))
buildProcess.stdout.on("data", (data) => console.log(`${data}`))
}
/**
 * The rest of the build process (killing the currently running server and restarting it)
 * @param {boolean} noRestart If set to true, only starts the server and won't kill an already running server
 * @returns
 */
const rest = async (noRestart) => {
const { err: err2, stdout } = await execPromise(
`sudo ss -lptn 'sport = :${process.env.SERVER_PORT || 8080}'`
)
if (err2) {
console.log(err2)
return
}
const pid = stdout
.toString()
.match(/pid=\d+/)?.[0]
?.split("=")?.[1]
if (pid) {
if (noRestart) return
    const { err } = await execPromise(`sudo kill ${pid}`)
if (err) {
console.log("failed to kill current running server. error:", err)
return
}
}
const server = spawn("npx", ["next", "start", "-p", process.env.SERVER_PORT || 8080])
server.stdout.on("data", (data) => console.log(`${data}`))
server.stdout.on("error", (data) => console.log(`${data}`))
server.on("close", () => server.removeAllListeners())
}
build()
cron.schedule("0 0 * * * *", build) // six-field pattern: fires at the top of every hour
This script fetches the app-info from the backend and rebuilds the project if the data differs from the current data, using exec and spawn (in fact, I run the Next.js scripts manually from Node.js). Run this script with sudo node cron-build.js, and you can simply import the output JSON file inside all your components and pages, since the data is available at build time and thus gets compiled into the project.
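As a hedged illustration of that last point, a page can import the generated file directly; the relative path and the title field are assumptions about your project layout and data shape:
// pages/index.js — consumes the file written by cron-build.js at build time
import appInfo from "../src/appInfo.json";

export default function Home() {
    // appInfo is inlined at compile time, so no runtime fetch is needed
    return <h1>{appInfo.title /* hypothetical field */}</h1>;
}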
Ultimately, I'd like to have an extra feature in my app if the app is running on AWS EC2.
How do I check for AWS and set a variable to indicate it? I found this thread to do the check, but how do I set an app-wide variable, like a boolean, at startup? Something like:
let checkAWS;
metadata.isEC2().then(function (onEC2) {
checkAWS = true;
console.log("EC2: " + onEC2);
});
let app = express();
app.locals.isAWS = checkAWS;
console.log(checkAWS);
Every time, I get the same output:
undefined
EC2: true
I am using the isAWS variable in my .ejs file to decide on that functionality.
metadata.isEC2() is asynchronous, so its value is only available some time later, when the .then() handler runs. Meanwhile, you're trying to use the value of checkAWS BEFORE that .then() handler has run.
Since you want access to that value before you start your Express server, you should only start it from within the handler chain, like this:
let isAWS;
metadata.isEC2().then(function (onEC2) {
    console.log("EC2: " + onEC2);
    isAWS = onEC2;
}).catch(err => {
    isAWS = false;
}).finally(() => {
    const app = express();
    app.locals.isAWS = isAWS;
    // do the rest of your app initialization here
    app.listen(...);
});
// do not use app here (it won't be defined here)
If you're trying to export app for other modules to use, you will have to take a slightly different approach. We'd have to see your broader code context to know what to recommend for that. But, basically the idea here is that you shouldn't start your server until you have the asynchronously retrieved isAWS result.
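One possible shape for that approach, sketched under the assumption that other modules should wait for the app: export a promise that resolves once detection has finished (detectEC2 below is a stand-in for your metadata.isEC2() call):
const express = require('express');

const detectEC2 = () => Promise.resolve(false); // stand-in for metadata.isEC2()

const appReady = detectEC2()
    .catch(() => false)
    .then(isAWS => {
        const app = express();
        app.locals.isAWS = isAWS;
        return app;
    });

// consumers wait for this promise instead of importing `app` directly
module.exports = appReady;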
Also, you should know that the metadata.isEC2() call you're using looks for a known endpoint on an EC2 instance in order to detect EC2. If that endpoint does not exist, the call takes 500 ms to time out and hit the .catch() branch above. If it is an EC2 instance, it returns quickly.
Note: it might be simpler to just check for the presence of some environment variables that are automatically set by the AWS environment, such as AWS_REGION or AWS_EXECUTION_ENV. Those can be checked synchronously.
let app = express();
app.locals.isAWS = !!process.env.AWS_REGION;
For future reference, in case anyone else is looking: this is what worked for me to tell whether you are running on AWS.
let isAwsSet = false; //has the app checked whether it is running on AWS by setting app.locals.isAWS
app.locals.isAWS = false;
app.get('/home', function(request, response) {
if (!isAwsSet){
urlExists().then(function (onAWS){
app.locals.isAWS = true;
isAwsSet = true;
response.render('home');
}).catch(function(error){
app.locals.isAWS = false;
isAwsSet = true;
response.render('home');
})
} else {
response.render('home');
}
});
const http = require('http');

function urlExists() {
    return new Promise((resolve, reject) => {
        // https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/instancedata-data-retrieval.html
        const options = {
            method: 'HEAD',
            host: '169.254.169.254',
            path: '/latest/meta-data/',
            port: 80,
            timeout: 1500
        };
        const req = http.request(options, (res) => {
            // reject on bad status
            if (res.statusCode !== 200) {
                return reject(new Error('statusCode=' + res.statusCode));
            }
            resolve(res.statusCode === 200);
        });
        req.on('timeout', () => {
            // destroying the request emits 'error', which rejects the promise below
            req.destroy();
        });
        req.on('error', function (err) {
            reject(err);
        });
        req.end();
    });
}
I'm currently trying to implement a live file feed that writes data to the connected clients on the web. This works perfectly fine if I'm editing the document directly: updates are sent immediately.
Instead of writing to the file manually, I created another process to handle that step. The issue I'm having is that when my process writes to this file, the changes are not detected. Unless I explicitly open the file, the changes are not picked up.
I also know that fs.watch is inconsistent, but what would be the difference between manual editing and automation?
// function to start process and check for changes in file
const start = function() {
fs.open(file, 'r', (err, fd) => {
if(err){
return setTimeout(start, 1000)
}
fs.watch(file,(event, filename) => {
if(event === "change"){
console.log('change detected');
// function that sends client messages
sendMessage(fd);
}
});
});
}
Here is the code that automates the process of writing to the file:
const fs = require('fs');
const file = 'file.txt';
const writeStream = fs.createWriteStream(file, {
flags:"a"
});
const cleanBuffer = function(len) {
let buf = Buffer.alloc(len);
buf.fill('\0');
return buf;
}
const check = function() {
    let newData = `data being written`;
    const buffer = cleanBuffer(newData.length);
    buffer.write(newData, 'ascii');
    writeStream.write(buffer);
    setTimeout(check, 10000);
}
check(); // kick off the recurring writes
I tried viewing the file from the file explorer, and whenever I open the folder the file is contained in, the change is detected... so is this actually watching the file?
I did some research, and it looks as though this is working as expected on Windows, because:
On Windows systems, this feature depends on ReadDirectoryChangesW
I had to change the function to use fs.watchFile instead, which is working for me, although fs.watch is the recommended API.
More can be read here: https://nodejs.org/docs/latest-v11.x/api/fs.html#fs_availability
The code now looks like this:
// function to start process and check for changes in tweets file
const start = function() {
fs.open(file, 'r', (err, fd) => {
if(err){
return setTimeout(start, 1000)
}
        fs.watchFile(file, (curr, prev) => {
            // compare timestamps numerically; two Date objects are never strictly equal
            if (curr.mtime.getTime() !== prev.mtime.getTime()) {
                console.log('change detected');
                sendMessage(fd);
            }
        });
});
}
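One caveat worth noting: fs.watchFile polls the file by stat-ing it on an interval (roughly 5 seconds by default), so detection is not instant. If that matters, the poll rate can be tuned; a small variation of the snippet above, reusing its file, fd and sendMessage:
fs.watchFile(file, { interval: 1000 }, (curr, prev) => { // poll every second
    if (curr.mtime.getTime() !== prev.mtime.getTime()) {
        console.log('change detected');
        sendMessage(fd);
    }
});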
I am trying to use file creation and deletion as a method of data transfer (not the best way, I know) between Python and Node.js. The Python side of the program works fine, as I am quite familiar with Python 3, but I can't get the Node.js script to work.
I've tried various methods of detecting when a file is created, mainly with the use of try {} catch {}, but none of them have worked.
function fufillRequest(data) {
fs.writeFile('Response.txt', data)
}
while(true) {
try {
fs.readFile('Request.txt', function(err,data) {
console.log(data);
});
} catch {
}
}
The program is supposed to see that the file has been created, read its contents, delete it, and then create and write to a response file.
@jfriend00's solution is correct. However, that solution never clears its timeout, which may cause an issue. If you want recurring checks with simpler timer handling, you can use setInterval.
Sample:
const checkTime = 1000;
const fs = require("fs");
const messageFile = "test.js";
const timerId = setInterval(() => {
    const isExists = fs.existsSync(messageFile); // existsSync takes only a path
    if (isExists) {
        // do something here
        clearInterval(timerId);
    }
}, checkTime);
You can also run your Python program directly from Node; no need to write another script.
const spawn = require("child_process").spawn;
const proc = spawn('python',["./watch.py"]);
proc.stdout.on('data', (data) => console.log(data.toString()))
proc.stderr.on('data', (data) => console.log(data.toString()))
You can either use a recurring timer or fs.watch() to monitor when the file appears.
Here's what it would look like with a recurring timer:
const checkTime = 1000;
const fs = require('fs');
function check() {
setTimeout(() => {
fs.readFile('Request.txt', 'utf8', function(err, data) {
if (err) {
// got error reading the file, call check() again
check();
} else {
// we have the file contents here, so do something with it
// can delete the source file too
}
});
}, checkTime)
}
check();
Note: Whatever process is creating this file should probably use an exclusive access mode when writing so that you don't create a race condition where it starts reading before the other process is done writing.
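One common way to get that exclusivity, sketched here as an assumption about how the writing process could behave: write to a temporary file first, then rename it into place, since a rename is atomic on the same filesystem:
const fs = require('fs');

// the watcher never observes a half-written Request.txt this way
fs.writeFileSync('Request.txt.tmp', 'payload for the other process');
fs.renameSync('Request.txt.tmp', 'Request.txt');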
I built a Node.js server to act as an adapter server, which, upon receiving a POST request containing some data, extracts the data from the request body and then forwards it to a few other external servers. Finally, my server sends back a response consisting of the responses from each of the external servers (success/fail).
If there's only one endpoint to forward to, it seems fairly straightforward. However, when I have to forward to more than one server, I have to rely on things like Promise.all(), which has fail-fast behaviour: as soon as one promise rejects (an external server is down), the combined promise rejects immediately, so I never collect the responses from the remaining servers.
This may not be the exact solution, but what I am posting could be a workaround for your problem.
A few days back I had a similar problem when I wanted to implement API versioning. Here is the solution I implemented; please have a look.
Architecture Diagram
Let me explain this diagram
Here in the diagram is the usual initial configuration for the server. All API requests arriving here are passed on to the index.js file inside the release directory.
index.js (in release directory)
const express = require('express');
const fid = require('./core/file.helper');
const router = express.Router();

// mount each release version (v1, v2, ...) under its own route prefix
fid.getFiles(__dirname, './release').then(releases => {
    releases.forEach(release => {
        router.use(`/${release}`, require(`./release/${release}/index`));
    });
});

module.exports = router;
Code snippet for the helper (core/file.helper.js):
// requiring path and fs modules
const path = require('path');
const fs = require('fs');

module.exports = {
    getFiles: (presentDirectory, directoryName) => {
        return new Promise((resolve, reject) => {
            // joining path of directory
            const directoryPath = path.join(presentDirectory, directoryName);
            // passing directoryPath and callback function
            fs.readdir(directoryPath, function (err, files) {
                // handling error; return so we don't also resolve
                if (err) {
                    console.log('Unable to scan directory: ' + err);
                    return reject(err);
                }
                resolve(files);
            });
        });
    }
};
Now, from this index file, the index.js inside each version folder is mapped.
Here is the code for the index.js inside v1, v2, and so on:
const express = require('express');
const mongoose = require('mongoose');
const fid = require('../../core/file.helper');
const dbconf = require('./config/datastore');
const router = express.Router();

// const connection_string = `mongodb+srv://${dbconf.atlas.username}:${dbconf.atlas.password}@${dbconf.atlas.host}/${dbconf.atlas.database}`;
const connection_string = `mongodb://${dbconf.default.username}:${dbconf.default.password}@${dbconf.default.host}:${dbconf.default.port}/${dbconf.default.database}`;

mongoose.connect(connection_string, {
    useCreateIndex: true,
    useNewUrlParser: true
}).then(status => {
    console.log(`Database connected to mongodb://${dbconf.default.username}@${dbconf.default.host}/${dbconf.default.database}`);
    fid.getFiles(__dirname, './endpoints').then(files => {
        files.forEach(file => {
            file = file.replace(/\.js$/, '');
            router.use(`/${file}`, require(`./endpoints/${file}`));
        });
    });
}).catch(err => {
    console.log(`Error connecting database ${err}`);
});

module.exports = router;
Each of these index.js files inside a version folder is in turn mapped to the endpoints inside that version's endpoints folder.
The code for one of the endpoints is given below:
const express = require('express');
const router = express.Router();
const userCtrl = require('../controllers/users');
router.post('/signup', userCtrl.signup);
router.post('/login', userCtrl.login);
module.exports = router;
In this file, we are connecting the endpoints to their controllers.
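For completeness, a hedged sketch of the top-level wiring implied by the diagram; the file name, require path, and port are my assumptions:
// app.js — every request flows through the release router from the first snippet
const express = require('express');
const app = express();

app.use('/', require('./index')); // adjust the path to wherever that router lives
app.listen(3000, () => console.log('API gateway listening on port 3000'));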
var config = {'targets':
[
'https://abc.api.xxx',
'https://xyz.abc',
'https://stackoverflow.net'
]};
relay(req, resp, config); // req and resp come from your HTTP server's request handler
function relay(req, resp, config) {
doRelay(req, resp, config['targets'], relayOne);
}
function doRelay(req, resp, servers, relayOne) {
var finalresponses = [];
if (servers.length > 0) {
var loop = function(servers, index, relayOne, done) {
relayOne(req, servers[index], function(response) {
      finalresponses.push(response);
if (++index < servers.length) {
setTimeout(function(){
loop(servers, index, relayOne, done);
}, 0);
} else {
done(resp, finalresponses);
}
});
};
loop(servers, 0, relayOne, done);
} else {
done(resp, finalresponses);
}
}
function relayOne(req, targetserver, relaydone) {
//call the targetserver and return the response data
/*return relaydone(response data);*/
}
function done(resp, finalresponses){
console.log('ended');
resp.writeHead(200, 'OK', {
'Content-Type' : 'text/plain'
});
    resp.end(JSON.stringify(finalresponses));
return;
}
It sounds like you are trying to design a reverse proxy. If you are struggling to get custom code to work, there is a free npm library that is very robust.
I would recommend node-http-proxy
I have posted the link below; it will lead you directly to the "modify a response" section, since you mentioned modifying the API format in your question. Be sure to read the entire page, though.
https://github.com/http-party/node-http-proxy#modify-a-response-from-a-proxied-server
Note: this library is also very good because it can support SSL, and proxies to both localhost (servers on the same machine) and servers on other machines (remote).
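As a quick, hedged taste of the library (the target URL is a placeholder; see the README for the response-modification options):
const http = require('http');
const httpProxy = require('http-proxy');

const proxy = httpProxy.createProxyServer({});

http.createServer((req, res) => {
    // forward every incoming request to the placeholder target
    proxy.web(req, res, { target: 'http://localhost:9000' });
}).listen(8000);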
Promise.all() from MDN
It rejects with the reason of the first promise that rejects.
To overcome the problem, you'll need to catch() each request you've made.
e.g.
Promise.all([
    request('<url 1>').catch(err => ({ err })),
    request('<url 2>').catch(err => ({ err })),
    request('<url 3>').catch(err => ({ err }))
])
.then(([result1, result2, result3]) => {
    // any result carrying an `err` property means that request failed
    if (result1.err) { /* handle failure of request 1 */ }
    if (result2.err) { /* handle failure of request 2 */ }
    if (result3.err) { /* handle failure of request 3 */ }
})
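On newer Node versions (12.9+), Promise.allSettled() achieves the same thing without the manual catch() calls; a brief sketch, with request() again standing in for your HTTP helper:
Promise.allSettled([
    request('<url 1>'),
    request('<url 2>'),
    request('<url 3>')
]).then(results => {
    results.forEach((result, i) => {
        if (result.status === 'fulfilled') {
            // result.value holds the response of request i
        } else {
            // result.reason holds the rejection error of request i
        }
    });
});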