Running a Fastify route periodically (via cron)

I would like to run a particular Fastify route periodically. There is fastify-cron, but I can't figure out whether I can use it to call a route from within Fastify. To summarize: given the route below, I would like https://my/server/greeting to be called at midnight every day.
fastify.get('/greeting', function (request, reply) {
  reply.send({ hello: 'world' })
})

With that plugin you can use the inject method, which is typically used for writing tests:
import Fastify from 'fastify'
import fastifyCron from 'fastify-cron'

const server = Fastify()

server.get('/greeting', function (request, reply) {
  reply.send({ hello: 'world' })
})

server.register(fastifyCron, {
  jobs: [
    {
      cronTime: '0 0 * * *', // every day at midnight UTC
      onTick: async server => {
        try {
          const response = await server.inject('/greeting')
          console.log(response.json())
        } catch (err) {
          console.error(err)
        }
      }
    }
  ]
})

// Note: depending on your Fastify version, listen may require a port,
// e.g. server.listen({ port: 3000 }, ...) on Fastify v4.
server.listen(() => {
  // By default, jobs are not running at startup
  server.cron.startAllJobs()
})
This is needed because you must generate a request/response pair to run your handler.
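If you need more control over the simulated request, inject also accepts an options object; a minimal sketch (the extra header is purely illustrative):
const response = await server.inject({
  method: 'GET',
  url: '/greeting',
  headers: { 'x-triggered-by': 'cron' } // hypothetical header, just to show the shape
})
console.log(response.statusCode, response.json())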


How to implement fastify hook for specific prefix?

I needed separate validation logic per version (prefix):
const routes = require('./routes/v1/users')
fastify.register(routes, { prefix: `/v1` }) // currently using fastify@4.5.2
It would be nice if there were something like the following:
fastify.register(routes, {
  prefix: `/v1`,
  preValidation: (req, reply) => { /* logic here */ }
})
I tried looking through the Fastify docs and other channels, but I can't find any similar implementation.
There is no such option on the fastify.register method, but you can do it by playing with encapsulation.
You can create a dedicated context that includes your routes plus the hook.
That context will be isolated from any other contexts you create with register.
This example shows how:
const fastify = require('fastify')({ logger: false, exposeHeadRoutes: false })

async function run() {
  const route = function routesPlugin(fastify, options, next) {
    fastify.get('/hello', async (request, reply) => {
      console.log('request')
      return { hello: 'world' }
    })
    next()
  }

  fastify.register(function intermediatePlugin(instance, opts, next) {
    instance.addHook('preValidation', function hook(request, reply, done) {
      console.log('only for v1')
      done()
    })
    instance.register(route, { prefix: `/v1` })
    next()
  })

  fastify.register(route, { prefix: `/v2` })

  await fastify.inject('/v1/hello')
  await fastify.inject('/v2/hello')
}

run()
You can visualize this structure by using fastify-overview (together with the fastify-overview-ui plugin).
Note that the hook is a child of intermediatePlugin, so it is not shared with the sibling routesPlugin registered under the /v2 prefix.
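A minimal sketch of how that inspection could look, assuming the overview() decorator described in fastify-overview's README (register the plugin before your routes so it can track them):
const fastify = require('fastify')()

fastify.register(require('fastify-overview'))
// ...register intermediatePlugin and the v2 route as above...

fastify.ready(() => {
  // Prints a JSON tree of contexts: the preValidation hook appears only
  // under intermediatePlugin, not under the v2 routesPlugin.
  console.log(JSON.stringify(fastify.overview(), null, 2))
})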

Load a plugin after another Fastify plugin

I'm currently working on an API where I autoload my plugins, but I would like to add global hooks to all routes.
What I'm currently doing is loading all my plugins first and my middlewares after, but I have a problem:
I'm trying to use the mongo client created by @fastify/mongodb, but I always end up with the error "mongo is not defined".
When I use it in my controller everything works, so I think I get this error because the plugin is not fully loaded. I did find .ready, but it doesn't work.
plugin/mongo.js
import fp from 'fastify-plugin';
import mongoPl from '@fastify/mongodb';

async function mongo(fastify, opts) {
  fastify.register(mongoPl, {
    forceClose: true,
    url: 'mongodb://mongo:27017'
  });
}

export default fp(mongo, {
  name: 'mongo'
});
libs/middleware
import fp from "fastify-plugin";
async function hooks(fastify, opts) {
fastify.addHook('onRequest', (req, res, done) => {
// Inject mongo client
mongoClient(req);
done();
});
}
async function mongoClient(req){
try {
req.db = this.mongo.client.db('db-name');
}catch (e) {
console.error(e);
}
}
export default fp(hooks, {
name: 'hooksMiddleware'
});
app.js
app.register(AutoLoad, {
  dir: join(import.meta.url, 'plugins'),
  options: Object.assign({})
}).after(() => {
  app.register(hooksMiddleware, {});
});
Your setup looks good, but the this context in the mongoClient function is undefined.
Here are two fixes:
async function hooks(fastify, opts) {
  fastify.addHook('onRequest', (req, res, done) => {
    // Inject mongo client, binding the Fastify instance as `this`
    mongoClient.call(fastify, req);
    done();
  });
}
Or:
async function hooks(fastify, opts) {
  // NOTE: changed from an arrow function to an anonymous (regular) function
  fastify.addHook('onRequest', function (req, res, done) {
    // Inject mongo client; `this` is the Fastify instance here
    mongoClient.call(this, req);
    done();
  });
}
You need to know:
the this context is set only on the named functions that you provide to Fastify; the mongoClient function is just a function outside Fastify's control
the this context cannot be set on arrow functions. (Opinion) I will never stop saying it: use arrow functions only for one-liners; otherwise named functions are always the better choice (better stack traces, more readable)
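A minimal sketch of the difference, assuming @fastify/mongodb has been registered so the instance is decorated with mongo:
// Named function: Fastify binds `this` to the encapsulated instance,
// so decorated properties like `this.mongo` are reachable.
fastify.addHook('onRequest', function (req, reply, done) {
  req.db = this.mongo.client.db('db-name')
  done()
})

// Arrow function: `this` comes from the enclosing scope, not from Fastify,
// so `this.mongo` is undefined here.
fastify.addHook('onRequest', (req, reply, done) => {
  // req.db = this.mongo.client.db('db-name') // would throw a TypeError
  done()
})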

Schedule a route or controller

I have a controller:
exports.updateDaily = async (req, res) => {
  try {
    const updateDaily = await transaction.decrement(
      {
        remainActive: 1,
      },
      {
        where: {
          remainActive: { [Op.gte]: 1 },
        },
      }
    );
    console.log(updateDaily);
    res.status(200).send({
      status: "Success",
    });
  } catch (error) {
    console.log(error);
    res.status(400).send({
      status: "Failed",
    });
  }
};
and a route like this:
router.patch('/updateDaily', updateDaily)
Using node-cron:
let job = cron.schedule('2 * * * *', () => {
  router.patch('/updateDaily', updateDaily);
});
job.start()
Using setInterval:
const scheduler = () => {
  return router.patch('/updateDaily', updateDaily);
}
setInterval(scheduler, 600000)
How can I make one of them run every 10 minutes? I already tried it with node-cron and setInterval, but nothing happens.
You've got the concept wrong. A scheduled job means running something periodically, and that something has to be a plain function that does the work. You don't need a router defined inside it. Your code does only one thing: it schedules a route to be registered once the scheduled time arrives, and re-registers it every 10 minutes.
What you need is to create a separate function for the operation and call it on a schedule; having a route at that point is extra, unless you also want to run the operation when a user sends a request (see the sketch after the code below).
const myDailyTask = async () => {
  await transaction.decrement(
    {
      remainActive: 1,
    },
    {
      where: {
        remainActive: { [Op.gte]: 1 },
      },
    }
  );
};

const id = setInterval(myDailyTask, 600_000);
The same approach works with node-cron. Note that '*/10 * * * *' fires every 10 minutes, whereas the original '2 * * * *' fires once an hour, at minute 2:
const job = cron.schedule('*/10 * * * *', () => {
  myDailyTask();
});
job.start();
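If you also want users to trigger the same operation on demand, a minimal sketch is to reuse the function in the route handler (the response shape mirrors the original controller):
router.patch('/updateDaily', async (req, res) => {
  try {
    await myDailyTask(); // the same function the scheduler calls
    res.status(200).send({ status: 'Success' });
  } catch (error) {
    res.status(400).send({ status: 'Failed' });
  }
});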

How to launch/cancel a function in Express by user request

I have an Express.js server which listens for a request from a user:
// PUG template
$("#request").click(() => {
  $.ajax({ url: "/launch", method: 'get' });
})

// server.js
app.get('/launch', (req, res) => {
  getCatalog();
})
This launches a huge do/while function, which may literally run for hours, unless the user wishes to cancel it.
Question: what is the proper way to launch and cancel this function by user request?
// PUG template
$("#cancel").click(() => {
  ...
})
I would approach this with application logic rather than Express functionality.
You can create a class that handles catalog loading and also keeps a state flag for the process that you can turn on and off (I believe the loading process involves multiple async function calls, so the event loop allows this; see the async sketch after the class below).
For example:
class CatalogLoader {
  constructor() {
    this.isProcessing = false
  }

  getCatalog() {
    this.isProcessing = true
    while (... && this.isProcessing) {
      // Huge loading logic
    }
    this.isProcessing = false
  }
}
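For the cancel flag to be observable mid-run, the loop body has to yield to the event loop. A minimal sketch of an async variant, where hasMorePages() and loadNextPage() are hypothetical stand-ins for the real loading logic:
class CatalogLoader {
  constructor() {
    this.isProcessing = false
  }

  async getCatalog() {
    this.isProcessing = true
    // Each `await` returns control to the event loop, so a concurrent
    // /cancelLaunch request can flip isProcessing to false between steps.
    while (this.hasMorePages() && this.isProcessing) {
      await this.loadNextPage()
    }
    this.isProcessing = false
  }
}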
And in Express you can add the endpoints below:
app.get('/launch', (req, res) => {
  catalogLoader.getCatalog();
})

app.get('/cancelLaunch', (req, res) => {
  catalogLoader.isProcessing = false
  ...
})
A second possible solution uses require('child_process'), but you need to know the PID of the process you wish to cancel. Benefit: it unloads the main Node thread from a heavy task.
So, include Node's child_process module: const childProcess = require('child_process');
Then:
app.get('/launch', (req, res) => {
  const getCatalog = childProcess.fork('script.js', null, {
    detached: true
  });
  // The child's PID is available as getCatalog.pid
  res.send();
});

app.get('/kill', (req, res, next) => {
  const pid = req.query.pid;
  if (pid) {
    process.kill(Number(pid)); // query values are strings; process.kill expects a number
    res.send();
  } else {
    res.end();
  }
});
$("#requestCancel").click(()=>{
$.ajax({url: "/kill?pid=variable*", method: 'get'});
})
I send data to PUG's js from node via Server Sent Events
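A minimal sketch of that wiring, assuming a hypothetical childPid variable captured when the child is forked:
// Server: push the forked child's PID to the page as an SSE message.
app.get('/events', (req, res) => {
  res.set({
    'Content-Type': 'text/event-stream',
    'Cache-Control': 'no-cache',
    'Connection': 'keep-alive'
  });
  res.flushHeaders();
  res.write(`data: ${JSON.stringify({ pid: childPid })}\n\n`); // childPid is hypothetical
});

// Client (PUG template): receive the PID for the later /kill call.
// const events = new EventSource('/events');
// events.onmessage = (e) => { window.catalogPid = JSON.parse(e.data).pid; };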

How should I connect to a Redis instance from an AWS Lambda function?

I'm trying to build an API for a single-page web app using AWS Lambda and the Serverless Framework. I want to use Redis Cloud for storage, mostly for its combination of speed and data persistence. I may use more Redis Cloud features in the future, so I'd prefer to avoid using ElastiCache for this. My Redis Cloud instance is running in the same AWS region as my function.
I have a function called related that takes a hashtag from a GET request to an API endpoint, and checks to see if there's an entry for it in the database. If it's there, it should return the results immediately. If not, it should query RiteTag, write the results to Redis, and then return the results to the user.
I'm pretty new to this, so I'm probably doing something adorably naive. Here's the event handler:
'use strict'

const lib = require('../lib/related')

module.exports.handler = function (event, context) {
  lib.respond(event, (err, res) => {
    if (err) {
      return context.fail(err)
    } else {
      return context.succeed(res)
    }
  })
}
Here's the ../lib/related.js file:
var redis = require('redis')
var jsonify = require('redis-jsonify')
var rt = require('./ritetag')

var redisOptions = {
  host: process.env.REDIS_URL,
  port: process.env.REDIS_PORT,
  password: process.env.REDIS_PASS
}
var client = jsonify(redis.createClient(redisOptions))

module.exports.respond = function (event, callback) {
  var tag = event.hashtag.replace(/^#/, '')
  var key = 'related:' + tag

  client.on('connect', () => {
    console.log('Connected:', client.connected)
  })

  client.on('end', () => {
    console.log('Connection closed.')
  })

  client.on('ready', function () {
    client.get(key, (err, res) => {
      if (err) {
        client.quit()
        callback(err)
      } else {
        if (res) {
          // Tag is found in Redis, so send results directly.
          client.quit()
          callback(null, res)
        } else {
          // Tag is not yet in Redis, so query Ritetag.
          rt.hashtagDirectory(tag, (err, res) => {
            if (err) {
              client.quit()
              callback(err)
            } else {
              client.set(key, res, (err) => {
                if (err) {
                  callback(err)
                } else {
                  client.quit()
                  callback(null, res)
                }
              })
            }
          })
        }
      }
    })
  })
}
All of this works as expected, to a point. If I run the function locally (using sls function run related), I have no problems whatsoever—tags are read from and written to the Redis database as they should be. However, when I deploy it (using sls dash deploy), it works the first time it's run after deployment, and then stops working. All subsequent attempts to run it simply return null to the browser (or Postman, or curl, or the web app). This is true regardless of whether the tag I use for testing is already in the database or not. If I then re-deploy, making no changes to the function itself, it works again—once.
On my local machine, the function first logs Connected: true to the console, then the results of the query, then Connection closed. On AWS, it logs Connected: true, then the results of the query, and that's it. On the second run, it logs Connection closed. and nothing else. On the third and all subsequent runs, it logs nothing at all. Neither environment ever reports any errors.
It seems pretty clear that the problem is with the connection to Redis. If I don't close it in the callbacks, then subsequent attempts to call the function just time out. I've also tried using redis.unref instead of redis.quit, but that didn't seem to make any difference.
Any help would be greatly appreciated.
I've now solved my own problem, and I hope I can be of help to someone experiencing this problem in the future.
There are two major considerations when connecting to a database like I did in the code above from a Lambda function:
Once context.succeed(), context.fail(), or context.done() is called, AWS may freeze any processes that haven't finished yet. This is what was causing AWS to log Connection closed on the second call to my API endpoint—the process was frozen just before Redis finished closing, then thawed on the next call, at which point it continued right where it left off, reporting that the connection was closed. Takeaway: if you want to close your database connection, make sure it's fully closed before you call one of those methods. You can do this by putting a callback in an event handler that's triggered by a connection close (.on('end'), in my case).
If you split your code into separate files and require them at the top of each file, like I did, Amazon will cache as many of those modules as possible in memory. If that's causing problems, try moving the require() calls inside a function instead of at the top of the file, then exporting that function. Those modules will then be re-imported whenever the function is run.
Here's my updated code. Note that I've also put my Redis configuration into a separate file, so I can import it into other Lambda functions without duplicating code.
The Event Handler
'use strict'

const lib = require('../lib/related')

module.exports.handler = function (event, context) {
  lib.respond(event, (err, res) => {
    if (err) {
      return context.fail(err)
    } else {
      return context.succeed(res)
    }
  })
}
Redis Configuration
module.exports = () => {
  const redis = require('redis')
  const jsonify = require('redis-jsonify')

  const redisOptions = {
    host: process.env.REDIS_URL,
    port: process.env.REDIS_PORT,
    password: process.env.REDIS_PASS
  }

  return jsonify(redis.createClient(redisOptions))
}
The Function
'use strict'

const rt = require('./ritetag')

module.exports.respond = function (event, callback) {
  const redis = require('./redis')()
  const tag = event.hashtag.replace(/^#/, '')
  const key = 'related:' + tag
  let error, response

  redis.on('end', () => {
    callback(error, response)
  })

  redis.on('ready', function () {
    redis.get(key, (err, res) => {
      if (err) {
        redis.quit(() => {
          error = err
        })
      } else {
        if (res) {
          // Tag is found in Redis, so send results directly.
          redis.quit(() => {
            response = res
          })
        } else {
          // Tag is not yet in Redis, so query Ritetag.
          rt.hashtagDirectory(tag, (err, res) => {
            if (err) {
              redis.quit(() => {
                error = err
              })
            } else {
              redis.set(key, res, (err) => {
                if (err) {
                  redis.quit(() => {
                    error = err
                  })
                } else {
                  redis.quit(() => {
                    response = res
                  })
                }
              })
            }
          })
        }
      }
    })
  })
}
This works exactly as it should—and it's blazing fast, too.
