Looping a perpetual UDP packet ping in Node.js (3s interval)

I am trying to build a loop that my server will run, pinging an Arduino that returns data when it receives a packet from my IP. I already have the MongoDB collection working for the data; I'm just stuck at the point where I'd like my server to start pinging the Arduino every few seconds from startup.
Note: this is my first real crack at understanding async JS, so this is likely just a noob mistake.
Code:
const dgram = require('dgram');
const message = Buffer.from('Some bytes');
const clientPing = dgram.createSocket('udp4');
const pinging = true;

function ping() {
  return new Promise((resolve, reject) => {
    // This chunk
    clientPing.send("Hi Uno", 8888, 'xxx.xx.xx.xxx', (err) => {
      clientPing.close();
    });
    // When run on its own it successfully pings the Arduino
    const error = false;
    if (error == true) {
      reject('Error');
    } else {
      resolve('Pinged');
    }
  });
}
//Working ping
ping().then().catch(err => console.log(err));
All of the above was meant to act as a simple proof that the promise works and successfully emits a UDP packet to the target.
function loop() {
  while (pinging == true) {
    setTimeout(() => {
      ping().then().catch(err => console.log(err));
      console.log('Pinged');
    }, 3000);
  }
}
The rest of these are just different combinations of how I've tried to solve the problem. As far as I can tell I don't have a memory leak, as the loop runs just fine without increasing resource consumption.
function loop() {
  console.log("entered loop")
  while (pinging == true) {
    setTimeout(() => {
      clientPing.send("Hi Uno", 8888, 'xxx.xx.xx.xxx', (err) => {
        clientPing.close();
      });
      console.log('Pinged');
    }, 3000);
  }
}
This one was intended to run just the ping code without using the promise, but it still doesn't yield the desired result. The console.log("entered loop") is printed, but the timeout never seems to fire.
loop();
This just runs one of the two loop() functions.
do {
  setTimeout(() => {
    ping().then().catch(err => console.log(err));
    console.log("pinged");
  }, 2000)
} while (pinging == true)
Lastly I thought I'd try a do...while, but also without success.
If all of these are pasted into a document sequentially, it builds my file; simply comment/uncomment each section as needed.
Am I just missing something obvious or is there something fairly complex here that's stopping this from working?
Thanks!

Your ping function is written incorrectly: it resolves the promise right after sending the ping, not in the send callback. Please change it like this.
function ping() {
  return new Promise((resolve, reject) => {
    clientPing.send("Hi Uno", 8888, 'xxx.xx.xx.xxx', (err) => {
      // Note: don't close the socket here if you intend to keep pinging;
      // once closed, it can't send again.
      if (err) {
        reject(err);
      } else {
        resolve('Pinged');
      }
    });
  });
}
As for the looping: you shouldn't use a synchronous loop, because it won't wait until setTimeout fires or your function has finished executing.
For async looping you can use async/await, for example:
function wait() {
  return new Promise((resolve) => {
    setTimeout(() => {
      resolve()
    }, 3000)
  })
}

// await is only valid inside an async function
async function loop(n) {
  for (let i = 0; i < n; i++) {
    await ping();
    await wait();
  }
}
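Putting the pieces together for the original goal of pinging every 3 seconds until told to stop - a minimal sketch, assuming the corrected ping() and wait() above, and that pinging is declared with let so it can be flipped to false later:

async function pingForever() {
  while (pinging) {
    try {
      await ping();
      console.log('Pinged');
    } catch (err) {
      console.log(err);
    }
    await wait(); // 3 second pause between pings
  }
}

pingForever();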

Related

Nodejs TCP server process packets in order

I am using a TCP server to receive and process packets in Node.js. It should receive 2 packets:
"create" for creating an object in a database. It first checks if the object already exists and then creates it. (-> takes some time to process)
"update" for updating the newly created object in the database
For the sake of simplicity, we'll just assume the first step always takes longer than the second (which is always true in my original code).
This is a MWE:
const net = require("net");

const server = net.createServer((conn) => {
  conn.on('data', async (data) => {
    console.log(`Instruction ${data} received`);
    await sleep(1000);
    console.log(`Instruction ${data} done`);
  });
});
server.listen(1234);

const client = net.createConnection(1234, 'localhost', async () => {
  client.write("create");
  await sleep(10); // just a cheap workaround to "force" sending 2 packets instead of one
  client.write("update");
});

// Just to make it easier to read
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
If I run this code I get:
Instruction create received
Instruction update received
Instruction create done
Instruction update done
But I want the "create" instruction to block the conn.on('data', ...) handler until its callback has finished asynchronously. The current code tries to update an entry before it is created in the database, which is not ideal.
Is there an (elegant) way to achieve this? I suspect some kind of buffer which stores the data and some kind of worker loop which processes it? But how do I avoid an infinite loop that blocks the event loop? (Event loop is the correct term, isn't it?)
Note: I have a lot more logic to handle fragmentation, etc., but this explains the issue I'm having.
I managed to get it to work with the package async-fifo-queue.
It's not the cleanest solution, but it does what I want about as efficiently as possible (using async/await instead of just looping infinitely).
Code:
const net = require("net");
const afq = require("async-fifo-queue");

const q = new afq.Queue();

const server = net.createServer((conn) => {
  conn.on('data', q.put.bind(q));
});
server.listen(1234);

const client = net.createConnection(1234, 'localhost', async () => {
  client.write("create");
  await sleep(10);
  client.write("update");
});

(async () => {
  while (server.listening) {
    const data = await q.get();
    console.log(`Instruction ${data} received`);
    await sleep(1000);
    console.log(`Instruction ${data} done`);
  }
})();

function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}
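If you'd rather avoid the extra dependency, the same strict ordering can be had with a plain promise chain. This is only a sketch of the idea, not code from the answer above: each 'data' event appends its handler to the tail of a chain, so packets are processed one at a time in arrival order.

const net = require("net");

const server = net.createServer((conn) => {
  // Tail of the promise chain; every new packet's handler is
  // appended here, so handlers run strictly one after another.
  let tail = Promise.resolve();
  conn.on('data', (data) => {
    tail = tail.then(async () => {
      console.log(`Instruction ${data} received`);
      await sleep(1000);
      console.log(`Instruction ${data} done`);
    });
  });
});
server.listen(1234);

function sleep(ms) {
  return new Promise((resolve) => setTimeout(resolve, ms));
}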
You can pause the socket when you get the "create" instruction and resume it after the handler finishes; while paused, incoming data is buffered by the socket, so the "update" packet isn't lost. Note that data arrives as a Buffer, so convert it to a string before comparing. Example:
const server = net.createServer((conn) => {
  conn.on('data', async (data) => {
    // data is a Buffer, so convert it before comparing
    const instruction = data.toString();
    if (instruction === 'create') {
      conn.pause();
    }
    console.log(`Instruction ${instruction} received`);
    await sleep(1000);
    console.log(`Instruction ${instruction} done`);
    if (instruction === 'create') {
      conn.resume();
    }
  });
});
server.listen(1234);

const client = net.createConnection(1234, 'localhost', async () => {
  client.write("create");
  await sleep(10); // just a cheap workaround to "force" sending 2 packets instead of one
  client.write("update");
});

// Just to make it easier to read
function sleep(ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms);
  });
}

Problem getting puppeteer-cluster waiting on page event before closing

I'm currently setting up a CI environment to automate the e2e tests our team runs in a test harness. I'm setting this up on GitLab and currently using Puppeteer. I have an event that fires from our test harness to designate when a test is complete. Now I am trying to "pool" the execution so I don't use up all resources or run out of listeners. I decided to try out puppeteer-cluster for this task. I am close to having things working, however I can't seem to get it to wait for the event on the page before closing the browser. Prior to using puppeteer-cluster, I was passing a callback into my function, and when the custom event fired (injected via exposeFunction) I would call it. That callback function is now being passed in through data, though, so the task no longer waits. I can't seem to find a way to get the execution to wait and was hoping someone might have an idea here. If anyone has any recommendations, I'd love to hear them.
test('Should launch the browser and run e2e tests', async (done) => {
  try {
    const cluster = await Cluster.launch({
      concurrency: Cluster.CONCURRENCY_CONTEXT,
      maxConcurrency: 10,
      monitor: false,
      timeout: 1200000,
      puppeteerOptions: browserConfig
    });

    // Print errors to console
    cluster.on("taskerror", (err, data) => {
      console.log(`Error crawling ${data}: ${err.message}`);
    });

    // Set up our task to be run
    await cluster.task(async ({page, data: {testUrl, isLastIndex, cb}, worker}) => {
      console.log(`Test starting at url: ${testUrl} - isLastIndex: ${isLastIndex}`);
      await page.goto(testUrl);
      await page.waitForSelector('#testHarness');
      await page.exposeFunction('onCustomEvent', async (e) => {
        if (isLastIndex === true) {
          // Make a call to our callback, finalizing tests are complete
          cb();
        }
        console.log(`Completed test at url: ${testUrl}`);
      });
      await page.evaluate(() => {
        document.addEventListener('TEST_COMPLETE', (e) => {
          window.onCustomEvent('TEST_COMPLETE');
          console.log("TEST COMPLETE");
        });
      });
    });

    // Perform the assignment of all of our xml tests to an array
    let arrOfTests = await buildTestArray();
    const arrOfTestsLen = arrOfTests.length;
    for (let i = 0; i < arrOfTestsLen; ++i) {
      // push our tests on the task queue
      await cluster.queue({ testUrl: arrOfTests[i], isLastIndex: (i === arrOfTestsLen - 1), cb: done });
    }

    await cluster.idle();
    await cluster.close();
  } catch (error) {
    console.log('ERROR:', error);
    done();
    throw error;
  }
});
So I got something working, but it really feels hacky to me and I'm not sure it's the right approach, so if anyone has the proper or a more recommended way of doing this, don't hesitate to respond. I'm posting it here should anyone else deal with something similar. I was able to get this working with a bool and setInterval; the working result is pasted below.
await cluster.task(async ({page, data: {testUrl, isLastIndex, cb}, worker}) => {
  let complete = false;
  console.log(`Test starting at url: ${testUrl} - isLastIndex: ${isLastIndex}`);
  await page.goto(testUrl);
  await page.waitForSelector('#testHarness');
  await page.focus('#testHarness');
  await page.exposeFunction('onCustomEvent', async (e) => {
    console.log("Custom event fired");
    if (isLastIndex === true) {
      // Make a call to our callback, finalizing tests are complete
      cb();
      complete = true;
      //console.log(`VAL IS ${complete}`);
    }
    console.log(`Completed test at url: ${testUrl}`);
  });

  // This will run on the actual page itself. So set up an event listener for
  // the TEST_COMPLETE event sent from the test harness itself
  await page.evaluate(() => {
    document.addEventListener('TEST_COMPLETE', (e) => {
      window.onCustomEvent('TEST_COMPLETE');
    });
  });

  // Poll until the exposed function flips the flag
  await new Promise(resolve => {
    try {
      let timerId = setInterval(() => {
        if (complete === true) {
          resolve();
          clearInterval(timerId);
        }
      }, 1000);
    } catch (e) {
      console.log('ERROR ', e);
    }
  });
});
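For what it's worth, the polling can be avoided entirely by resolving a promise straight from the exposed function. This is only a sketch of that alternative (the isLastIndex/cb wiring and error handling are omitted), not the code from above:

await cluster.task(async ({ page, data: { testUrl } }) => {
  await page.goto(testUrl);
  await page.waitForSelector('#testHarness');

  // Capture the resolver so the exposed function can settle the
  // promise when the page fires TEST_COMPLETE.
  let markComplete;
  const testFinished = new Promise((resolve) => { markComplete = resolve; });

  await page.exposeFunction('onCustomEvent', () => markComplete());
  await page.evaluate(() => {
    document.addEventListener('TEST_COMPLETE', () => window.onCustomEvent());
  });

  // The task stays alive until the event fires, so cluster.idle()
  // naturally waits for every test to finish.
  await testFinished;
});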

Watch an API for Updates

I am trying to watch the API at https://api.exchangeratesapi.io/latest and detect any changes, so I can update my clients just like a webhook notification would. But what I am doing is:

while (true) {
  fetch('https://api.exchangeratesapi.io/latest')
    .then(res => ......)
}

I am caching the result and always check if there are any changes; if there are, I send a request to the client.
I am looking for a better way to pull the data instead of making a while loop.
If you use a while loop, you would potentially send many requests before one returns. And even when they return, they're not necessarily going to be in order. Here is a quick example of what might happen if there is a network spike, for example:
const sleep = ms => new Promise(res => setTimeout(res, ms));

async function mockNetworkCall(num) {
  const timings = [50, 150, 50]
  console.log(`sending request ${num}`);
  await sleep(timings[num]);
  console.log(`request ${num} finished`)
}

for (let i = 0; i < 3; i++) {
  mockNetworkCall(i);
}
You could avoid the while loop if you instead take the approach to only do a new request when the last one finishes. In that case, you will only have a single request active at any one time and you know that you're getting the result in order.
You can wrap the logic for that in a simple function to watch a URL and only re-initiate a request when the previous one is finished. This is the skeleton for such a function - it might need tweaking according to your needs:
function watch({ url, retryCount, lastResponse = null }) {
  fetch(url)
    .then(res => {
      /* do something */
      return res;
    })
    .then(res => watch({ url, retryCount, lastResponse: res })) // launch again
    .catch(err => {
      /* handle error */
      console.error("error getting URL", err);
      console.warn("retries left: ", retryCount);
      if (retryCount-- > 0) {
        watch({ url, retryCount, lastResponse });
      }
    });
}

watch({ url: "google.com", retryCount: 3 });
Or the same using async/await
async function watch({ url, retryCount, lastResponse = null }) {
  try {
    const res = await fetch(url);
    /* do something */
    watch({ url, retryCount, lastResponse: res }); // launch again
  } catch (err) {
    /* handle error */
    console.error("error getting URL", err);
    console.warn("retries left: ", retryCount);
    if (retryCount-- > 0) {
      watch({ url, retryCount, lastResponse });
    }
  }
}

watch({ url: "google.com", retryCount: 3 });
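One tweak worth considering (my own addition, not part of the answer above): as written, watch re-fetches immediately after each response, which can hammer the API. Adding a delay between polls keeps the one-request-at-a-time property while pacing the traffic; intervalMs is a hypothetical parameter:

const sleep = ms => new Promise(res => setTimeout(res, ms));

async function watch({ url, retryCount, intervalMs = 5000, lastResponse = null }) {
  try {
    const res = await fetch(url);
    /* compare res with lastResponse and notify clients on a change */
    await sleep(intervalMs); // pace the polling
    watch({ url, retryCount, intervalMs, lastResponse: res });
  } catch (err) {
    console.error("error getting URL", err);
    if (retryCount-- > 0) {
      await sleep(intervalMs);
      watch({ url, retryCount, intervalMs, lastResponse });
    }
  }
}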

How to handle an HTTP request that stops sending data

I'm trying to download videos from some URLs, and for some reason the downloads will sometimes stop before they're complete - as in, res.on('data', ...) stops being called (using https.get) before the response has finished. I would like to detect this and retry the request after a given time, but I'm having a hard time getting working code for it. I'm hoping there's a built-in way to deal with something like this that I'm missing. I've already tried setting a timeout for the request and a timeout for the response, and neither seems to do the trick.
Current attempt:
async function downloadVideo(send) {
  try {
    https.get(downloadUrl, res => {
      let timerId;
      res.on(`data`, (c) => {
        // Reset the stall watchdog on every chunk; if no chunk arrives
        // for 2s, destroy the response and retry the download.
        clearTimeout(timerId);
        timerId = setTimeout(() => {
          res.destroy();
          console.log(`retrying`, name);
          downloadVideo(send);
        }, 2000);
        if (!throttle) {
          send(`PercentDownloaded`, [
            index,
            100 * fileSize(filePath) / res.headers[`content-length`]
          ]);
          throttle = true;
          setTimeout(() => throttle = false, 500);
        }
      });
      res
        .pipe(fs.createWriteStream(filePath))
        .on(`finish`, () => {
          clearTimeout(timerId);
          send(`Done`, index);
        });
    });
  } catch (error) {
    console.log(name, error);
    send(`DownloadError`, index);
  }
}
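For reference, here is one way the stall-detection idea in the attempt above could be packaged - a sketch under my own assumptions (downloadWithIdleTimeout and its parameters are hypothetical names, not from the question): arm the watchdog before the first chunk as well, reject on a stall, and let the caller decide whether to retry.

const https = require('https');
const fs = require('fs');

// Download `url` to `destPath`; reject with 'stalled' if no data
// arrives for `idleMs`, so the caller can decide to retry.
function downloadWithIdleTimeout(url, destPath, idleMs = 2000) {
  return new Promise((resolve, reject) => {
    https.get(url, (res) => {
      let timerId;
      const resetWatchdog = () => {
        clearTimeout(timerId);
        timerId = setTimeout(() => {
          res.destroy();
          reject(new Error('stalled'));
        }, idleMs);
      };
      resetWatchdog(); // arm before the first chunk arrives, too
      res.on('data', resetWatchdog);
      res.pipe(fs.createWriteStream(destPath))
        .on('finish', () => {
          clearTimeout(timerId);
          resolve();
        });
    }).on('error', reject);
  });
}

// Retry wrapper: give the download a few attempts before giving up.
async function downloadWithRetries(url, destPath, attempts = 3) {
  for (let i = 0; i < attempts; i++) {
    try {
      return await downloadWithIdleTimeout(url, destPath);
    } catch (err) {
      if (i === attempts - 1) throw err;
      console.log('download stalled, retrying...');
    }
  }
}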

Multiple and fast external POST requests

I have a Node.js server making POST requests to an external API. Each time I have to make ~10k requests (don't worry, I'm not abusing the API), and I need it to take around 2-3 minutes.
I'm using the request-promise library to make the requests, along with Promise.all() to wait for all the requests to resolve.
My problem is that the requests seem stuck and don't run in parallel. I know that a promise executes as soon as it's created, but it seems that only about 10 resolve events can be listened to at one time.
I tried raising maxListeners and also using es6-promise-pool (with a pool of 500), but no luck.
My next solution will probably be to use child_process with fork - does that seem like the best fit for my problem?
Thanks!
Code:
async function send_msg(msg) {
  return new Promise(function (resolve, reject) {
    request.post(options, function (err, res, body) {
      if (err) {
        logger.error('error sending msg ' + err);
        resolve(null);
      } else {
        resolve(body);
      }
    });
  });
}

async function send_msgs() {
  let msgs = await OutgoingMessage.findAll();
  let promises = []; // note: this declaration was missing
  for (let i = 0; i < msgs.length; i++) {
    promises.push(send_msg(msgs[i]).then(async (result) => {
      if (result != null) {
        try {
          let sid = result['MessageSid'];
          let status = result['Status'];
          msgs[i].update({sid: sid, status: status});
        } catch (e) {
          logger.error(e + JSON.stringify(result));
          msgs[i].update({status: 'failed'});
        }
      }
    }));
  }
  return Promise.all(promises);
}
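Before reaching for child_process, it may be worth capping concurrency with plain batching - a sketch assuming the send_msg above (the batch size of 500 mirrors the pool size the question already tried):

async function send_msgs_batched(batchSize = 500) {
  let msgs = await OutgoingMessage.findAll();
  for (let i = 0; i < msgs.length; i += batchSize) {
    const batch = msgs.slice(i, i + batchSize);
    // Wait for this batch to settle before starting the next one,
    // so at most batchSize requests are in flight at any moment.
    await Promise.all(batch.map((msg) => send_msg(msg)));
  }
}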
