Add a delayed response in Node.js

I am a beginner at Node.js. My problem is that I have 16 clients sending requests to the server. The server must aggregate the 16 requests and send the result back to each client (each client receives an aggregate of 16 requests, of which 15 are from other clients).
How can I add a delay on the server (Node.js) while waiting for the requests to aggregate, and then send the result to each client?
Can you help me?

You have to ensure that all the clients send a parameter called batchId. Then you can simply:
Keep a cache.
On every client request, push the request body & response object into the cache under its batchId.
On every client request, check whether the batch size has reached 16.
If it is 16, aggregate and respond to all the requests at once.
Delete the local cache entry for the batch.
const requestsCache = {};

function aggregateAndRespond(batchId) {
  const arrivedRequests = requestsCache[batchId];
  // process the "body" property of each element in "arrivedRequests"
  const result = processRequests(arrivedRequests);
  for (const arrivedRequest of arrivedRequests) {
    arrivedRequest.response.json(result);
  }
  delete requestsCache[batchId];
}

app.post('/client-requests', function(request, response) {
  const body = request.body;
  const batchId = body.batchId;
  const arrivedRequests = requestsCache[batchId] || [];
  arrivedRequests.push({ response, body });
  requestsCache[batchId] = arrivedRequests; // store a newly created batch back in the cache
  if (arrivedRequests.length === 16) {
    aggregateAndRespond(batchId);
  }
});
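For completeness, a minimal sketch of the client side, assuming Node 18+ where fetch is global; the URL, port, batchId value, and payload field are all placeholders, not part of the original question:
// Hypothetical client call: each of the 16 clients posts its payload with the
// same agreed-upon batchId and waits for the aggregated response.
const response = await fetch('http://localhost:3000/client-requests', {
  method: 'POST',
  headers: { 'Content-Type': 'application/json' },
  body: JSON.stringify({ batchId: 'batch-1', payload: 'client-specific data' })
});
const aggregated = await response.json(); // resolves once all 16 requests have arrived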
Update:
const requestsCache = {};

function aggregateAndRespond(batchId) {
  const hours = 2;
  const batchDetails = requestsCache[batchId];
  const arrivedRequests = batchDetails.requests;
  // process the "body" property of each element in "arrivedRequests"
  const result = processRequests(arrivedRequests);
  for (const arrivedRequest of arrivedRequests) {
    arrivedRequest.response.json(result);
  }
  batchDetails.processed = true;
  // The batch details cannot be deleted immediately because, if a request for
  // the same batch arrives after the timeout, it would end up creating a new
  // batch entry. If that is desired, replace the timer lines below with
  // "delete requestsCache[batchId];". Otherwise, since the batch is marked as
  // "processed", remaining requests of the same batch are simply dropped.
  // The batch details are dropped after 2 hours; if a shorter duration
  // suffices, update accordingly.
  setTimeout(function someMoreTimeLater() {
    delete requestsCache[batchId];
  }, hours * 60 * 60 * 1000);
}

function tryAggregateAndRespond(batchId, timeoutInMinutes, requestData) {
  const defaultBatchDetails = {
    processed: false,
    requests: [],
    aggregateTimerReference: null
  };
  const batchDetails = requestsCache[batchId] || defaultBatchDetails;
  requestsCache[batchId] = batchDetails; // store a newly created batch back in the cache
  if (batchDetails.processed === true) {
    return true;
  }
  batchDetails.requests.push(requestData);
  // The timer is reset every time a request arrives. If no request arrives for
  // the configured time after the last one, aggregation kicks in.
  // If you want the timer to be set only at the first request, delete the
  // clearTimeout line and uncomment the "if" block after it.
  clearTimeout(batchDetails.aggregateTimerReference);
  //if (batchDetails.aggregateTimerReference !== null) {
  //  return false;
  //}
  batchDetails.aggregateTimerReference = setTimeout(function someTimeLater() {
    aggregateAndRespond(batchId);
  }, timeoutInMinutes * 60 * 1000);
  return false;
}

app.post('/client-requests', function(request, response) {
  const timeoutInMinutes = 2;
  const body = request.body;
  const batchId = body.batchId;
  const endRequest = tryAggregateAndRespond(batchId, timeoutInMinutes, { response, body });
  if (endRequest === true) {
    response.json({ message: 'Batch already processed', batchId });
  }
});
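Note that request.body is only populated when a body parser is registered. A minimal server setup for the snippets above might look like this (the port is an assumption):
const express = require('express');
const app = express();
app.use(express.json()); // required so request.body is populated for JSON payloads
// ...the /client-requests route from above goes here...
app.listen(3000);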

Related

How can I share data between 2 resolver functions in Express GraphQL?

I have 2 queries and corresponding resolver functions, but while writing the resolvers I'm not sure how to store the data from the first function and then reuse it in the second one. Note: I do not want to call the function again, as it would execute again and it makes an inline API call. I just want something like a session on the global state in Express. Here's the code:
const resolvers = {
  getStudent: async ({ id }) => {
    const resp = await service(id);
    return resp;
  },
  courseDetails: () => {
    console.log(resp); // I want to access the resp object from the function above, but don't want to call getStudent again
  }
};
I tried context but it didn't work.
You can implement a simple in-memory store.
By storing the promise and returning it, you won't need to worry about multiple requests to the same resource.
const got = require('got');
const assert = require('assert');

function studentServiceFactory(options = {}) {
  const TTL = options.ttl || 5 * 60 * 1000; // default 5 min TTL, in milliseconds
  const BASE_API = "https://swapi.dev/api";
  const store = {};
  return {
    get: ({ id }) => {
      if (!store[id] || store[id].timestamp + TTL < Date.now()) {
        // store the promise itself, not the resolved value
        store[id] = {
          promise: got(`${BASE_API}/people/${id}`),
          timestamp: Date.now(),
        };
        console.log(`${BASE_API}/people/${id}`);
      }
      return store[id].promise;
    }
  };
}

const studentService = studentServiceFactory({ ttl: 1000 });
const resolvers = {
  studentService: studentService,
};

// test program
(async () => {
  const request1 = await resolvers.studentService.get({ id: 1 });
  const request2 = await resolvers.studentService.get({ id: 1 });
  // Both calls return the same promise, so the resolved values are identical.
  assert.equal(request1, request2);
  // wait for the cached resource to go stale
  setTimeout(async () => {
    const request3 = await resolvers.studentService.get({ id: 1 });
    assert.notEqual(request1, request3);
  }, 3000);
})();
Two requests are independent of each other. The only way to share data between two requests is to persist it somewhere: a file, a database, etc. In your case, you can simply call the service function again in the other resolver.
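Tying the two answers together, a minimal sketch of how the question's resolvers could share data through the in-memory store above (the resolver names come from the question; passing the same id to both resolvers is an assumption):
// Hypothetical resolvers built on the studentService factory above.
// Both await the same cached promise, so the upstream API is called
// only once per TTL window.
const resolvers = {
  getStudent: async ({ id }) => {
    const resp = await studentService.get({ id });
    return resp.body;
  },
  courseDetails: async ({ id }) => {
    const resp = await studentService.get({ id }); // served from the cache, no second API call
    console.log(resp.body);
    return resp.body;
  }
};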

Google Pub/Sub pull method restarts Express server every 1 min

I am using Pub/Sub to pull messages when someone buys a subscription from Google Play.
const { PubSub } = require('@google-cloud/pubsub');
const grpc = require('grpc');

// Instantiates a client
const pubSubClient = new PubSub({ grpc });

const pubsub = () => {
  const projectId = process.env.GOOGLE_PUB_SUB_PROJECT_ID; // Your Google Cloud Platform project ID
  const subscriptionName = process.env.GOOGLE_PUB_SUB_SUBSCRIBER_NAME; // Name of our subscription
  const timeout = 60;
  const maxInProgress = 10;
  const subscriberOptions = {
    flowControl: {
      maxMessages: maxInProgress,
    },
  };
  // Get our created subscription
  const subscriptionPub = pubSubClient.subscription(subscriptionName, subscriberOptions);
  console.log(`subscription ${subscriptionPub.name} found.`);
  let messageCount = 0;
  // Create an event handler to handle messages
  const messageHandler = message => {
    console.log(`Received message: ${message.id}`);
    console.log(`Data: ${message.data}`);
    console.log(`Attributes: ${JSON.stringify(message.attributes)}`);
    // todo: you can update your backend here using the purchase token
    messageCount += 1;
    // "Ack" (acknowledge receipt of) the message
    message.ack();
  };
  // Create an event handler to handle errors
  const errorHandler = function (error) {
    console.log(`GOOGLE PUB SUB ERROR: ${error}`);
    throw error;
  };
  // Listen for new messages/errors until the timeout is hit
  subscriptionPub.on('message', messageHandler);
  subscriptionPub.on('error', errorHandler);
  setTimeout(() => {
    subscriptionPub.removeListener('message', messageHandler);
    subscriptionPub.removeListener('error', errorHandler);
    console.log(`${messageCount} message(s) received.`);
  }, timeout * 1000);
};

module.exports = pubsub;
The above file is called from main.js, and every minute I receive the log `subscription ${subscriptionPub.name} found.`
Also, I have commented out the setTimeout code for now, but I want to understand why removeListener is important for removing the listeners every minute.
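For reference, a minimal sketch of how the module appears to be wired up, reconstructed from the description above; the setInterval scheduling and module path are assumptions based on the log appearing every minute:
// main.js (hypothetical wiring)
const pubsub = require('./pubsub');

// If pubsub() is invoked every minute, each call attaches a *new* pair of
// 'message'/'error' listeners to the subscription; the setTimeout with
// removeListener is what detaches the previous pair so handlers don't pile up.
setInterval(pubsub, 60 * 1000);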

Does a Node.js worker never time out?

I have an iteration that can take up to hours to complete.
Example:
do {
  // this is an API action
  let response = await fetch_some_data();
  // other database action
  await perform_operation();
  next = response.next;
} while (next);
I am assuming that the operation doesn't time out, but I don't know for sure.
Any explanation of how Node.js satisfies this condition is highly appreciated. Thanks.
Update:
The actual development code is as follows:
const Shopify = require('shopify-api-node');
const shopServices = require('../../../../services/shop_services/shop');
const { create } = require('../../../../controllers/products/Products');

exports.initiate = async (redis_client) => {
  redis_client.lpop(['sync'], async function (err, reply) {
    if (reply === null) {
      console.log("Queue Empty");
      return true;
    }
    let data = JSON.parse(reply),
      shopservices = new shopServices(data),
      shop_data = await shopservices.get()
        .catch(error => {
          console.log(error);
        });
    const shopify = new Shopify({
      shopName: shop_data.name,
      accessToken: shop_data.access_token,
      apiVersion: '2020-04',
      autoLimit: false,
      timeout: 60 * 1000
    });
    let params = { limit: 250 };
    do {
      try {
        let response = await shopify.product.list(params);
        if (await create(response, shop_data)) {
          console.log(`${data.current}`);
        }
        data.current += data.offset;
        params = response.nextPageParameters;
      } catch (error) {
        console.log("here");
        console.log(error);
        params = false;
      }
    } while (params);
  });
};
Everything is working fine so far. I just want to be sure that Node will always run this execution to completion. The function is called by a cron every minute, and the data for processing is supplied by a queue.
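A minimal sketch of the wiring described here, with node-cron and a default Redis client as assumptions (the question only says a cron runs every minute and feeds a queue):
const cron = require('node-cron');
const redis = require('redis');
const { initiate } = require('./initiate'); // path is hypothetical

const redis_client = redis.createClient();
// Run the queue consumer every minute; each run pops one job from the "sync" list.
cron.schedule('* * * * *', () => initiate(redis_client));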

Rate limit API requests in node.js

First off, I'm new here, so please... be gentle. I've been teaching myself Node.js over the last few months, mostly with the desire to scrape a bunch of data from the FlightAware API.
I am trying to request a list of flights for aircraft from their site; here is what I have so far.
var aircraft = [array,of,aircraft,tail,numbers];
for (let i = 0; i < aircraft.length; i++) {
  faGetFlight(aircraft[i], function doneLookup(data) {
    dbUpdateFlight(collectionName, data);
  });
}
This code works, but as soon as there are more than 10 aircraft in the list it fails, because it sends more than 10 API requests in a minute. What are some easy/straightforward ways to slow this down a little? I would like to send about 50-60 API requests in total each time this runs, so I need them spaced over 5-6 minutes. The faGetFlight() function uses the 'request' module. I've tried the request-rate-limiter module instead of the request module, but I can't seem to make it work; I don't think the authorization works properly with it, since I'm getting an error about an anonymous user. For what it's worth, it works with just the request module, but then I run into the rate-limit issues.
Here is my faGetFlight() code.
var RateLimiter = require('request-rate-limiter');
const REQS_PER_MIN = 10; // 10 per minute, i.e. one request every 6 seconds
var limiter = new RateLimiter(REQS_PER_MIN);

// API variables
var apiUrl = 'url';
var apiEndpoint = 'endpoint';
var apiAuth = 'apikey';
var apiExtData = 0;
var apiHowMany = 15; // number of results to request

// function header reconstructed from the calling loop above (acIdent, callback)
function faGetFlight(acIdent, callback) {
  var options = {
    method: 'GET',
    url: apiUrl + apiEndpoint,
    qs: { ident: acIdent },
    headers: {
      Authorization: apiAuth
    }
  };
  limiter.request(options, function doneDownload(error, response, body) {
    if (error) throw new Error(error);
    callback(body);
  });
}
Sorry if this isn't clear...it's my first post!
You can do a naive implementation using functions and a simple setTimeout.
See:
var aircrafts = [array,of,aircraft,tail,numbers];
var REQS_PER_MIN = 10;
var MAX_AMOUNT_REQUESTS = 100;
var timeout = (1 / REQS_PER_MIN) * 60 * 1000; // 6000 ms between requests

processAircraft(0);

function processAircraft(index) {
  // stop at the end of the list or at the configured request budget
  if (index >= aircrafts.length || index >= MAX_AMOUNT_REQUESTS)
    return console.log("All done!");
  // On start of the function, schedule the next aircraft in "timeout" ms
  setTimeout(function() {
    processAircraft(index + 1);
  }, timeout);
  faGetFlight(aircrafts[index], function doneLookup(data) {
    dbUpdateFlight(collectionName, data);
  });
}
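Under the same assumptions (faGetFlight, dbUpdateFlight, and collectionName come from the question), an alternative sketch with async/await and a promise-based delay achieves the same spacing:
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms));
const delayMs = (1 / REQS_PER_MIN) * 60 * 1000; // 6000 ms at 10 requests per minute

async function processAll(aircrafts) {
  for (const tailNumber of aircrafts) {
    faGetFlight(tailNumber, function doneLookup(data) {
      dbUpdateFlight(collectionName, data);
    });
    await sleep(delayMs); // space the requests evenly to stay under the rate limit
  }
  console.log("All done!");
}

processAll(aircrafts);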

Curl and Postman giving different responses?

I am writing an API in Node.js.
URL: http://localhost:8000/api/request?connId=19&timeout=8000
When I call the above API from Postman, it takes 8 seconds to give the response, as expected.
But when I call it from curl, it gives output instantly:
curl http://localhost:8000/api/request?connId=19&timeout=8000
The code is:
app.get('/api/request', ( req, res ) => {
let timeoutID = null; // return by setTimeout
let starttime = null; // time when request is begin
const time = req.query.timeout; // time send by client
const connId = req.query.connId; // id send by client
// checking client data is valid or not
if ( typeof(connId) !== undefined && typeof(time) !== undefined ) {
starttime = new Date(); // get current time
starttime = starttime.setMilliseconds(starttime.getMilliseconds()); // convert into millisecon
// initiate setTimeout
timeoutID = setTimeout(() => {
// remove that element from global array which reqest has been complted
removeByAttr(timeout, 'connId', connId);
// res.send({ status: "ok"}); // send response
}, time);
// initiate setInterval
const setInv = setInterval(() => {
// check timeout [] conatin the current request
const arrLength = timeout.length && timeout.filter(({ connId }) => req.query.connId === connId).length;
if ( arrLength === 0 ) {
res.send({ status: "ok"}); // send response
clearInterval(setInv); // clear/destroy current Interval
}
}, 80);
// insert the current request into global [] timeout
timeout.push({
connId,
time,
timeoutID,
starttime,
'endtime': +starttime + +time
});
}
});
As the curl documentation notes, when you GET an endpoint whose URL contains an ampersand, you should quote the URL:
curl "http://localhost:8000/api/request?connId=19&timeout=8000"
Without double quotes around the URL, the shell treats & as the end of the command, so the timeout parameter never reaches the server; console.log(req.query) then prints { connId: '19' }, which is what causes the problem.
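To make the difference concrete, here is what the server-side query parsing sees in each case (the first result is taken from the answer above; the second follows from proper quoting):
// Unquoted: the shell splits the command at "&"
//   curl http://localhost:8000/api/request?connId=19&timeout=8000
//   req.query => { connId: '19' }
// Quoted: the full query string reaches Express
//   curl "http://localhost:8000/api/request?connId=19&timeout=8000"
//   req.query => { connId: '19', timeout: '8000' }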
