I have express js server which listens for a request from a user:
// PUG template
$("#request").click(() => {
  $.ajax({ url: "/launch", method: 'get' });
})

// server.js
app.get('/launch', (req, res) => {
  getCatalog();
});
This should launch a huge do-while loop, which may literally run for hours, unless the user wishes to cancel it.
Question: what should be the proper way to launch and cancel this function by user request?
// PUG template
$("#cancel").click(() => {
  ...
})
I would approach this case with application logic rather than Express functionality.
You can create a class that handles the catalog loading and also keeps a state flag for the process that you can turn on and off (I believe the loading process involves multiple async calls, so the event loop allows this).
For example:
class CatalogLoader {
  constructor() {
    this.isProcessing = false
  }

  getCatalog() {
    this.isProcessing = true
    while(... && this.isProcessing) {
      // Huge loading logic
    }
    this.isProcessing = false
  }
}
And in Express you can add the routes below:
app.get('/launch', (req, res) => {
  catalogLoader.getCatalog();
  res.send(); // respond immediately; loading continues in the background
});

app.get('/cancelLaunch', (req, res) => {
  catalogLoader.isProcessing = false
  ...
});
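One caveat to the sketch above: if the loop body is fully synchronous, the event loop never gets a chance to process the /cancelLaunch request, so the flag is never flipped. A minimal sketch of an async, cancellable getCatalog, where hasMoreItems and loadNextBatch are hypothetical placeholders for the real loading logic:

// placeholders for the real loading logic (assumptions for this sketch)
const hasMoreItems = () => false
const loadNextBatch = async () => { /* fetch and store one batch */ }

class CatalogLoader {
  constructor() {
    this.isProcessing = false
  }

  async getCatalog() {
    this.isProcessing = true
    while (hasMoreItems() && this.isProcessing) {
      // awaiting each batch yields to the event loop,
      // so an incoming /cancelLaunch request can flip the flag
      await loadNextBatch()
    }
    this.isProcessing = false
  }
}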
A second possible solution uses require('child_process'), but you need to know the PID of the process you wish to cancel. Benefit: it unloads the heavy task from the main Node thread.
So, after including Node's child_process module with const childProcess = require('child_process');, you can do the following:
app.get('/launch', (req, res) => {
  const getCatalog = childProcess.fork('script.js', [], {
    detached: true
  });
  // the child's PID is available as getCatalog.pid
  res.send();
});
app.get('/kill', (req, res, next) => {
  const pid = req.query.pid;
  if (pid) {
    process.kill(Number(pid)); // query parameters arrive as strings
    res.send();
  } else {
    res.end();
  }
});

$("#requestCancel").click(() => {
  $.ajax({ url: "/kill?pid=variable*", method: 'get' });
})
I send data (such as the child PID) from Node to the PUG page's JS via Server-Sent Events.
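For reference, a minimal sketch of what such a Server-Sent Events channel could look like; the /events route name and the lastChildPid variable are assumptions, not part of the original setup:

// server.js: hypothetical SSE endpoint the PUG page subscribes to;
// lastChildPid would be set by the /launch handler after forking
let lastChildPid = null;

app.get('/events', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive'
  });
  res.flushHeaders();
  // push the PID (or any progress data) to the browser
  res.write(`data: ${JSON.stringify({ pid: lastChildPid })}\n\n`);
});

// PUG template: remember the PID for the cancel request
const events = new EventSource('/events');
let childPid = null;
events.onmessage = (e) => { childPid = JSON.parse(e.data).pid; };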
I am trying to append some data to my request object using a middleware, but I want to fetch that data only once, when the server comes up.
So I tried doing it with a middleware that closes over the fetched data, but that is problematic because I cannot pass a promise as a middleware.
This is what I'm trying to do:
const setupData = async () => {
  const data = await getSomeData();
  return (req, res, next) => {
    req.data = data;
    next();
  }
}

app.use(setupData());
I tried using the solution suggested here, but it won't work, as this would happen on every request.
Any idea how I can get around this? I can always put the info in a global var, but I would like to avoid that.
I also saw some in-memory packages to help with it (such as node-cache), but I would like to do it with a middleware.
Thanks in advance
Just cache the result using a normal variable:
let data = null;

function setupData (req, res, next) {
  if (data !== null) {
    req.data = data;
    next();
  }
  else {
    getSomeData().then(result => {
      data = result
      req.data = data;
      next();
    });
  }
}

app.use(setupData);
This is the minimal, least complicated implementation. You can of course refactor it to be much DRYer and less prone to mistakes by pulling the caching logic out into its own function:
Cleaner Implementation
let cache = null;

async function getCachedData() {
  if (cache === null) {
    cache = await getSomeData();
  }
  return cache;
}
Which makes setupData much cleaner:
function setupData (req, res, next) {
  getCachedData().then(data => {
    req.data = data;
    next();
  });
}
Either way, the cache is populated on the first request. This means that a second request may arrive before the data has been cached, so at startup getSomeData() may run more than once.
Really call getSomeData() ONLY ONCE
If you really want to call getSomeData only once you must call it before setting up Express:
async function main () {
  const data = await getSomeData();

  const app = express();

  //
  // set up express middlewares...
  //

  app.use((req, res, next) => {
    req.data = data;
    next();
  });

  //
  // set up routes...
  //

  app.listen(config.port);
}

main(); // start everything
The key here is to realize that we have been trying to do everything backwards: setting up a constant value asynchronously AFTER starting to set up Express. The natural flow of the program wants the constant value to exist BEFORE we begin setting up Express, so we do everything else inside an async function (here called main) instead of trying to run an async function while setting up Express.
You can do it without async/await:
const setupData = (req, res, next) => {
  // You can put a condition here so that it runs only once
  getSomeData().then((data) => {
    req.app.locals.data = data // The data can be accessed in the next middleware using req.app.locals
    next();
  }).catch((error) => {
    console.log("Error Occurred");
    res.status(400).end("Error Occurred");
  })
}

app.use(setupData);
You should check the documentation of getSomeData to see how it works.
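The "runs only once" condition mentioned in the comment above could be as simple as checking whether app.locals has already been populated; a sketch (like the earlier answer, the data is still fetched lazily on the first request):

const setupData = (req, res, next) => {
  // skip the fetch if a previous request already stored the data
  if (req.app.locals.data) {
    return next();
  }
  getSomeData().then((data) => {
    req.app.locals.data = data
    next();
  }).catch((error) => {
    console.log("Error Occurred");
    res.status(400).end("Error Occurred");
  })
}

app.use(setupData);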
I'm building a Slackbot that makes a call to an Express app, which then needs to 1) fetch some other data from the Slack API, and 2) insert the resulting data into my database. I think I finally have the flow right using async/await, but the operation is timing out because the original call from the Slackbot needs to receive a response within some fixed time I can't control. It would be fine for my purposes to ping the bot with a response immediately and then execute the rest of the logic asynchronously, but I'm wondering the best way to set this up.
My Express route looks like:
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');
const slack = require('../../services/slack_helpers');

// POST api/slack/foo
router.post('/foo', async (req, res) => {
  let [body, images] = await slack.grab_context(req);

  knex('texts')
    .insert({ body: body,
              image_ids: images })
    .then(text => { res.send('worked!'); }) // This sends a response back to the original Slackbot call
    .catch(err => { res.send(err); })
});

module.exports = router;
And then the slack_helpers module looks like:
const { WebClient } = require('@slack/web-api');

const Slack = new WebClient(process.env.SLACKBOT_TOKEN);

async function grab_context(req) {
  try {
    const context = await Slack.conversations.history({ // This is the part that takes too long
      channel: req.body.channel_id,
      latest: req.headers['X-Slack-Request-Timestamp'],
      inclusive: true,
      limit: 5
    });
  } catch (error) {
    return [error.toString(), 'error'];
  }

  return await parse_context(context);
};

function parse_context(context) {
  var body = [];
  context.messages.forEach(message => {
    body.push(message.text);
  });
  body = body.join(' \n');
  return [body, ''];
}

module.exports = {
  grab_context
};
I'm still getting my head around asynchronous programming, so I may be missing something obvious. I think that res.send perhaps needs to come before the grab_context call? But again, I'm not sure of the best flow here.
Update
I've also tried this pattern in the API route, but still getting a timeout:
slack.grab_context(req).then((body, images) => {
  knex ...
})
Your timeout may not be coming from where you think. From what I see, it is coming from grab_context. Consider the following simplified version of grab_context
async function grab_context_simple() {
  try {
    const context = { hello: 'world' }
  } catch (error) {
    return [error.toString(), 'error']
  }
  return context
}

grab_context_simple() /* => Promise {
  <rejected> ReferenceError: context is not defined
  ...
} */
You are trying to return context outside of the try block where it was defined, so grab_context will reject with a ReferenceError. It's very likely that this error is being swallowed at the moment, so it would seem like it is timing out.
The fix is to move a single line in grab_context
async function grab_context(req) {
  try {
    const context = await Slack.conversations.history({
      channel: req.body.channel_id,
      latest: req.headers['X-Slack-Request-Timestamp'],
      inclusive: true,
      limit: 5
    });
    return await parse_context(context); // <- moved this
  } catch (error) {
    return [error.toString(), 'error'];
  }
};
I'm wondering the best way to set this up.
You could add a higher-level try/catch block to handle errors that arise from the /foo route. You could also improve readability by staying consistent between async/await and promise chains. Below is how you could use async/await with knex, as well as the aforementioned try/catch block:
const express = require('express');
const router = express.Router();
const knex = require('../../db/knex.js');
const slack = require('../../services/slack_helpers');

const insertInto = table => payload => knex(table).insert(payload)

const onFooRequest = async (req, res) => {
  try {
    let [body, images] = await slack.grab_context(req);
    const text = await insertInto('texts')({
      body: body,
      image_ids: images,
    });
    res.send('worked!');
  } catch (err) {
    res.send(err);
  }
}

router.post('/foo', onFooRequest);

module.exports = router;
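If you also want to acknowledge the Slackbot immediately (as mentioned in the question) and finish the slow work afterwards, one hedged variation is to send the response before awaiting the Slack and knex calls; note that any later failure can then only be logged, not reported back:

const onFooRequest = async (req, res) => {
  // respond right away so the Slackbot call does not time out
  res.send('received!');

  try {
    const [body, images] = await slack.grab_context(req);
    await insertInto('texts')({ body: body, image_ids: images });
  } catch (err) {
    // the response has already been sent, so just log the failure
    console.error(err);
  }
}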
My website's API works fine and runs fast, except for one route. Since Node.js is single threaded, we wanted to run the DATA call on a separate thread so that it doesn't block the rest of the incoming calls, and because forking took too long. Basically the code I want is like this:
router.get(req, res) {
  if (MasterThread) {
    create a thread to run the DATA functions
  }
  else {
    res.json(getDATA())
  }
}
Is this possible at all? All the tutorials I found implied that I either had to use cluster, or that my threading had to occur in my main.js, neither of which I want to do.
When I tried to set up the threading across 2 files, the imports for Node.js threading were always null, or my imports never worked.
So again, is this a possible thing?
You can use worker_threads for this case. I use it for one of my sites and it works perfectly. Here is a minimal example:
const path = require('path')
const { Worker } = require('worker_threads')

const worker_script = path.join(__dirname, "./worker.js")

router.get('/', function(req, res) {
  // obj is whatever data the worker needs, e.g. taken from req.query
  const worker = new Worker(worker_script, {
    workerData: JSON.stringify(obj)
  })
  worker.on("error", (err) => console.log(err))
  worker.on("exit", () => console.log("exit"))
  worker.on("message", (data) => {
    console.log(data)
    res.send(data)
  })
})
And here is the worker:
const { parentPort, workerData, isMainThread } = require('worker_threads')

if (!isMainThread) {
  console.log("workerData: ", workerData)
  // do some heavy work with the data
  var parsed = JSON.parse(workerData)
  parentPort.postMessage(parsed)
}
I use Supertest to test my Express apps, but I'm running into a challenge when I want my handlers to do asynchronous processing after the response is sent. Take this code, for example:
const request = require('supertest');
const express = require('express');

const app = express();

app.get('/user', async (req, res) => {
  res.status(200).json({ success: true });
  await someAsyncTaskThatHappensAfterTheResponse();
});

describe('A Simple Test', () => {
  it('should get a valid response', () => {
    return request(app)
      .get('/user')
      .expect(200)
      .then(response => {
        // Test stuff here.
      });
  });
});
If the someAsyncTaskThatHappensAfterTheResponse() call throws an error, then the test here is subject to a race condition where it may or may not fail based on that error. Even aside from error handling, it's also difficult to check for side effects if they happen after the response is sent. Imagine that you wanted to trigger database updates after sending a response. You wouldn't be able to tell from your test when you should expect that the updates have completed. Is there any way to use Supertest to wait until the handler function has finished executing?
This cannot be done easily, because Supertest acts like a client and you do not have access to the actual req/res objects in Express (see https://stackoverflow.com/a/26811414/387094).
As a completely hacky workaround, here is what worked for me.
Create a file which houses a callback/promise. For instance, my file test-hack.js looks like so:
let callback = null

export const callbackPromise = () => new Promise((resolve) => {
  callback = resolve
})

export default function callWhenComplete () {
  if (callback) callback('hack complete')
}
When all processing is complete, call the callWhenComplete function. For instance, my middleware looks like so:
import callWhenComplete from './test-hack'

export default function middlewareIpnMyo () {
  return async function route (req, res, next) {
    res.status(200)
    res.send()

    // async logic here
    callWhenComplete()
  }
}
And finally, in your test, await the callbackPromise like so:
import { callbackPromise } from 'test-hack'

describe('POST /someHack', () => {
  it.only('should handle a post request', async () => {
    const response = await request
      .post('/someHack')
      .send({ soMuch: 'hackery' })
      .expect(200)

    const result = await callbackPromise()
    // anything below this is executed after callWhenComplete() is
    // executed from the route
  })
})
Inspired by @travis-stevens, here is a slightly different solution that uses setInterval, so you can be sure the promise is set up before you make your supertest call. This also allows tracking requests by id, in case you want to use the library for many tests without collisions.
const backgroundResult = {};

export function backgroundListener(id, ms = 1000) {
  backgroundResult[id] = false;
  return new Promise(resolve => {
    // set up interval
    const interval = setInterval(isComplete, ms);
    // completion logic
    function isComplete() {
      if (false !== backgroundResult[id]) {
        resolve(backgroundResult[id]);
        delete backgroundResult[id];
        clearInterval(interval);
      }
    }
  });
}

export function backgroundComplete(id, result = true) {
  if (id in backgroundResult) {
    backgroundResult[id] = result;
  }
}
Make a call to get the listener promise BEFORE your supertest.request() call (in this case, using agent).
it('should respond with a 200 but background error for failed async', async function() {
  const agent = supertest.agent(app);
  const trackingId = 'jds934894d34kdkd';
  const bgListener = background.backgroundListener(trackingId);

  // post something but include tracking id
  await agent
    .post('/v1/user')
    .field('testTrackingId', trackingId)
    .field('name', 'Bob Smith')
    .expect(200);

  // execute the promise which waits for the completion function to run
  const backgroundError = await bgListener;

  // should have received an error
  assert.equal(backgroundError instanceof Error, true);
});
Your controller should expect the tracking id and pass it to the complete function at the end of the controller's backgrounded processing. Passing an error as the second value is one way to check the result later, but you can just pass false or whatever you like.
// if background task(s) were successful, promise in test will return true
backgroundComplete(testTrackingId);
// if not successful, promise in test will return this error object
backgroundComplete(testTrackingId, new Error('Failed'));
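For completeness, here is a hedged sketch of what such a controller route could look like; the /v1/user path matches the test above, while doBackgroundWork and the multipart body parsing are assumptions, not part of the original answer:

// hypothetical controller matching the test above (assumes multipart fields
// like testTrackingId have already been parsed into req.body, e.g. by multer)
app.post('/v1/user', async (req, res) => {
  const trackingId = req.body.testTrackingId;
  res.status(200).send(); // respond immediately

  try {
    await doBackgroundWork(req.body); // placeholder for the real background task(s)
    backgroundComplete(trackingId); // listener promise in the test resolves with true
  } catch (err) {
    backgroundComplete(trackingId, err); // listener promise resolves with the error
  }
});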
If anyone has any comments or improvements, that would be appreciated :)
Being new to Node, I am still having some troubles with callbacks.
In the mapBpiIfindex function I am trying to loop through all of the VLANs found by the vlans function. Once it has looped through all VLANs and built the map, I want to output the map to the browser. But the only output I am getting is {}. How can I send the mapping to the browser? I am not even sure if I am using my callbacks correctly.
var express = require('express');
var router = express.Router();
var snmp = require('snmp-native');

// Create a Session with explicit default host, port, and community.
let session = new snmp.Session({ host: 'AASW0120', port: 161, community: 'community' })

let Mibs = {
  hostname: [1,3,6,1,2,1,1,5,0],
  vlans: [1,3,6,1,4,1,9,9,46,1,3,1,1,2],
  dot1dBasePortIfIndex: [1,3,6,1,2,1,17,1,4,1,2]
}

/* Get all VLANs on switch */
function vlans(snmpSession, cb) {
  let vlans = []
  session.getSubtree({ oid: Mibs.vlans }, function (error, varbinds) {
    if (error) {
      console.log('Fail :(');
    } else {
      varbinds.forEach(function (varbind) {
        vlans.push(varbind.oid[varbind.oid.length - 1])
      })
    }
    cb(vlans)
  })
}

/* Map BPIs to Ifindices */
function mapBpiIfindex(session, cb) {
  let map = {}
  vlans(session, function (vlans) {
    vlans.forEach(function (vlan) {
      session.getSubtree({ oid: Mibs.dot1dBasePortIfIndex, community: 'community#' + vlan }, function (error, varbinds) {
        if (error) {
          console.log('Fail :(')
        } else {
          varbinds.forEach(function (varbind) {
            map[varbind.oid[varbind.oid.length - 1]] = { ifindex: varbind.value, vlan: vlan }
          })
        }
      })
    })
    cb(map)
  })
}

router.get('/vlans', function (req, res, next) {
  vlans(session, function (vlans) {
    res.send(vlans)
  })
})

router.get('/bpi-ifindex', function (req, res, next) {
  mapBpiIfindex(session, function (mapping) {
    res.send(mapping)
  })
})
The answer is no, you're not using it correctly ;)
A few things here:
You should be clear that only the code within the callback is executed after the operation has finished, so cb(map) does not wait until all of your looped callbacks have finished. That's why nothing is returned (when cb is called, the async functions have not finished yet and the map values are undefined). Have a look at How do I return the response from an asynchronous call?; it's the same principle.
Have a look at the async module, specifically the do* or whilst methods. It'll help you process loops with async function calls.
Apart from that, you should not use forEach if you care about performance.
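To illustrate the first point, here is a sketch of mapBpiIfindex that only calls cb(map) after every getSubtree callback has finished, using a plain completion counter rather than the async module (the rest of the code is unchanged; the counter is the addition):

/* Map BPIs to Ifindices, firing the callback only after all subtree walks complete */
function mapBpiIfindex(session, cb) {
  let map = {}
  vlans(session, function (vlans) {
    if (vlans.length === 0) return cb(map)
    let pending = vlans.length
    vlans.forEach(function (vlan) {
      session.getSubtree({ oid: Mibs.dot1dBasePortIfIndex, community: 'community#' + vlan }, function (error, varbinds) {
        if (error) {
          console.log('Fail :(')
        } else {
          varbinds.forEach(function (varbind) {
            map[varbind.oid[varbind.oid.length - 1]] = { ifindex: varbind.value, vlan: vlan }
          })
        }
        // count down; only the last finished walk invokes the callback
        pending -= 1
        if (pending === 0) cb(map)
      })
    })
  })
}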