I have added two cron jobs that run every 15 minutes and every 3 hours. While they run, I'm unable to access my APIs and get 502 Bad Gateway errors.
import Agenda from 'agenda';
import { updateCinemaData } from './updateCinemaData';
import { updateStreamingData } from './updateStreamingData';
import dotenv from 'dotenv';
dotenv.config();
const uri = process.env.MONGO_URI;
const MAX_CONCURRENCY_JOBS = 4;
const agenda = new Agenda({
  db: {
    address: uri,
    options: { useNewUrlParser: true },
    collection: 'schedule-jobs'
  },
  processEvery: '30 seconds',
});
const graceful = async () => {
  await agenda.stop();
  process.exit(0);
};
process.on('SIGTERM', graceful);
process.on('SIGINT', graceful);
agenda.define('get cinema data', { priority: 1 }, async (job, done) => {
  console.log('get cinema data');
  await updateCinemaData();
  done();
});

agenda.define('get streaming data', { priority: 2 }, async (job, done) => {
  console.log('get streaming data');
  await updateStreamingData();
  done();
});
(async function () {
  const getCinemaData = agenda.create('get cinema data', {});
  const getStreamingData = agenda.create('get streaming data', {});

  await agenda.start();
  agenda.maxConcurrency(MAX_CONCURRENCY_JOBS);

  const jobNames = [
    'get cinema data',
    'get streaming data'
  ];

  for (let jobName of jobNames) {
    try {
      let jobs = await agenda.jobs({ name: jobName });
      for (let job of jobs) {
        job.remove();
      }
    } catch (error) {
      console.log(error);
    }
  }
  await getCinemaData.repeatEvery('*/15 * * * *').save(); // Runs every 15 minutes.
  await getStreamingData.repeatEvery('0 */3 * * *').save(); // Runs every 3 hours.
})();
export default agenda;
To catch all exceptions, I have added the code below to my server.js:
process.on('uncaughtException', function (exception) {
  log.error(exception);
});
Even with this I'm not able to capture the errors, and the issue happens only in production. I also checked the pm2 and Nginx logs, but with no luck. Can anyone explain what issue I'm facing? I'm a beginner in Node.js and haven't been able to resolve this.
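One detail that might matter here (an assumption on my part, not a confirmed diagnosis): 'uncaughtException' does not fire for rejected promises, and an error thrown inside an async Agenda job handler surfaces as an unhandled rejection. A minimal sketch that would at least log those, reusing the same logger:

process.on('unhandledRejection', function (reason) {
  // rejected promises never reach the 'uncaughtException' handler above
  log.error(reason);
});

And inside each job definition, a try/catch keeps done() being called even when the update fails (sketch only):

agenda.define('get cinema data', { priority: 1 }, async (job, done) => {
  try {
    await updateCinemaData();
    done();
  } catch (err) {
    console.log(err);
    done(err);
  }
});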
I want to reach 100% coverage on this function with node-tap, but I can't mock the error path; it always throws
Cannot find module 'create' Require stack: - /home/mptr8/Code/Projects/me/fastify-example/fastify-postgres/test/integration.js
But I do have a create function in my query.js file. What am I doing wrong here? Why doesn't it invoke the mocked method?
t.mock("../query.js", {
create: () => {
throw new Error();
},
});
I also tried this combination, because query.js depends on db.js. Now the mock error is gone, but I'm still not getting the error thrown from my fastify.inject.
t.mock("../db.js", {
"../query.js": {
create: () => { throw new Error() },
},
});
app.post("/", async (request, reply) => {
try {
const { body } = request;
const book = create(body.title);
reply.send(book);
} catch (error) {
// this part are not covered
reply.code(500).send({ message: "internal server error" });
}
});
Here is my complete code. You can see the full code in this GitHub repository.
// server.js
const fastify = require("fastify");
const {
  migration,
  create,
} = require("./query");
const db = require("./db");

function build(opts = {}) {
  const app = fastify(opts);

  migration();

  app.post("/", async (request, reply) => {
    try {
      const { body } = request;
      const book = create(body.title);
      reply.send(book);
    } catch (error) {
      reply.code(500).send({ message: "internal server error" });
    }
  });

  app.addHook("onClose", (_instance, done) => {
    db.close();
    done();
  });

  // return the instance so the tests can inject requests against it
  return app;
}

module.exports = build;
// db.js
const { Pool } = require("pg");

const pool = new Pool({
  connectionString:
    "postgresql://postgres:postgres@localhost:5432/fastify_postgres?schema=public",
});

module.exports = {
  query: (text, params) => pool.query(text, params),
  close: () => pool.end(),
};
// query.js
const db = require("./db");

async function migration() {
  await db.query(`
    CREATE TABLE IF NOT EXISTS books (
      id serial PRIMARY KEY,
      title varchar (255) NOT NULL
    )
  `);
}

async function create(title) {
  return await db.query("INSERT INTO books (title) VALUES ($1)", [title]);
}

module.exports = { migration, create };
// test.js
const tap = require("tap");
const fastify = require("../server");

tap.test("coba", async (t) => {
  const app = await fastify();

  t.test("should success create books", async (t) => {
    const response = await app.inject({
      method: "POST",
      url: "/",
      payload: {
        title: "Hello,World!",
      },
    });
    t.equal(response.statusCode, 200);
  });

  t.test("should throw error", async (t) => {
    const app = await fastify();
    // it doesn't throw the error :((
    t.mock("../query.js", {
      create: () => {
        throw new Error();
      },
    });
    const response = await app.inject({
      method: "POST",
      url: "/",
      payload: {
        title: "Hello,World!",
      },
    });
    t.equal(response.statusCode, 500);
    // call app close on last test child to close app and db properly
    app.close();
  });
});
You should use the value returned by the t.mock function:
const build = t.mock("../server", {
  "./query.js": {
    create: () => { throw new Error() },
  },
})
const app = await build({})
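For example, the failing-path test could then look like this (a sketch: it assumes the two-argument t.mock("module", mocks) form shown above, that the "./query.js" key resolves the same way server.js requires it, and that the inner t.mock call from the original test is dropped):

t.test("should throw error", async (t) => {
  const build = t.mock("../server", {
    "./query.js": {
      create: () => { throw new Error() },
    },
  });
  const app = await build({});
  const response = await app.inject({
    method: "POST",
    url: "/",
    payload: { title: "Hello,World!" },
  });
  t.equal(response.statusCode, 500);
  app.close();
});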
I'm trying to develop a POST API. In the middle of execution I have validation, such as checking whether the name is already in use. I set an error handler callback and it successfully sends the 'Already registered' response, but when I check the CLI it shows this error:
Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client
I don't know what's wrong; I have used this error handler in the past and it seemed fine.
Here is my code in the router:
createUserAccount: async function (req, res) {
  const programData = req.body;
  try {
    await service.create(programData, function (code, err, result) {
      if (err) {
        if (code === 409) {
          res.status(HTTPSTATUS.CONFLICT).send(err.message);
        } else {
          res.status(HTTPSTATUS.BAD_REQUEST).send(err.message);
        }
      } else {
        res.status(HTTPSTATUS.CREATED).json(result);
      }
    });
  } catch (e) {
    console.log(e);
    res.status(HTTPSTATUS.BAD_REQUEST).json("Failed.");
  }
},
Here is my function in my service:
const config = require('#configs/config.json');
const sequelize = require('sequelize');
const SEQUELIZE = new sequelize(config[env]);

module.exports = {
  createAccount: async (name, password, callback) => {
    try {
      let check,
        institution_id;
      const checkName = await Profile.count({
        where: {
          name: name
        }
      });
      // result checkName = 1
      if (checkName > 0) {
        // then successfully execute this condition and
        return callback(409, 'Already registered.', null);
        // this show in console ----> POST /API/v1/user/profile 409 616.152 ms - 31
      }
      await Login.create({
        username: email,
        password: password
      }).then(resLogin => {
        const response = {
          id: resLogin.id,
        };
        callback(201, null, response);
      }).catch(error => {
        callback(400, error, null);
      });
    } catch (e) {
      callback(400, e, null);
    }
  },
  create: async (payload, callback) => {
    let loginID = null;
    let {
      profile,
      address
    } = payload;
    let {
      name,
      email,
      password
    } = profile;
    try {
      await module.exports.createAccount(name, password, function (code, error, result) {
        if (error) {
          const res = { message: error };
          // what I need is for execution to end here
          return callback(code, res, null);
        }
        loginID = result.id;
      });
      // but in fact it still executes the rest of this function even when createAccount() reports an error via the callback
      let transaction = await SEQUELIZE.transaction();
      await Address.create(address, { transaction })
        .then(async resAddress => {
          await transaction.commit();
          return callback(201, null, resProfile);
        }).catch(async e => {
          return callback(400, e, null);
        });
    } catch (e) {
      console.log(e);
      callback(400, e, null);
    }
  },
};
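For what it's worth: the double response happens because the return inside createAccount's callback only exits that inner callback, not create() itself, so the Address.create branch still runs and invokes the router's callback a second time, which is what produces ERR_HTTP_HEADERS_SENT. A rough early-return sketch, assuming the same models (Profile, Login, Address) and status-code convention; the result shape here is only illustrative:

  create: async (payload, callback) => {
    const { profile, address } = payload;
    const { name, email, password } = profile;
    try {
      const checkName = await Profile.count({ where: { name } });
      if (checkName > 0) {
        // stop here: nothing below runs, so the router responds exactly once
        return callback(409, { message: 'Already registered.' }, null);
      }
      const resLogin = await Login.create({ username: email, password });
      const transaction = await SEQUELIZE.transaction();
      const resAddress = await Address.create(address, { transaction });
      await transaction.commit();
      return callback(201, null, { id: resLogin.id, address: resAddress });
    } catch (e) {
      return callback(400, e, null);
    }
  },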
I have an endpoint that receives files and creates a background task for uploading those files to S3.
In order to background the file uploads, I'm using Agenda (https://github.com/agenda/agenda). The only limitation is that I need to store the file in a format that is supported by MongoDB (which is what Agenda uses under the hood). To do that, I am converting the file to a buffer before sending it over to Agenda.
This is my code:
Mutation: {
  batchCreateProgressPics: combineResolvers(
    isAuthenticated,
    async (parent, { pics }, { models, currentUser }) => {
      return await Promise.all(
        pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const { createReadStream } = await file;
          const stream = createReadStream();
          console.log("Setting up buffer...");
          const buffer = await new Promise((resolve, reject) => {
            var buffers = [];
            stream.on("data", function(data) {
              buffers.push(data);
            });
            stream.on("end", function() {
              const everything = Buffer.concat(buffers);
              resolve(everything);
            });
            stream.on("error", function(e) {
              reject(e);
            });
          });
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          console.log("Creating background task...");
          Agenda.now("uploadProgressPic", {
            userId: currentUser.id,
            progressPicId: progressPic.id,
            filename: `${progressPic.id}-${bodyPart}.jpg`,
            buffer,
          });
          console.log("Done.");
          return progressPic;
        })
      );
    }
  ),
},
This is fast on my local development server, but it takes a long time in production because of the buffer work. The lines following console.log("Setting up buffer...") are the slow part.
What I would like to do is:
1. Create and return an array of progressPics, one for each element in the pics array.
2. Do the buffer stuff after the response has been sent so it doesn't hold up the front end.
Is this possible?
============ UPDATE ==========
So if I do not await the promise, it complains that the request disconnected before the buffer finished:
const uploadProgressPic = async ({ file, progressPicId, userId, bodyPart }) => {
  try {
    const { createReadStream } = await file;
    const stream = createReadStream();
    console.log("Setting up buffer...");
    const buffer = await new Promise((resolve, reject) => {
      var buffers = [];
      stream.on("data", function(data) {
        buffers.push(data);
      });
      stream.on("end", function() {
        const everything = Buffer.concat(buffers);
        resolve(everything);
      });
      stream.on("error", function(e) {
        reject(e);
      });
    });
    console.log("Done.");
    console.log("Creating background task...");
    Agenda.now("uploadProgressPic", {
      userId,
      progressPicId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
      buffer,
    });
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};
export default {
  Mutation: {
    batchCreateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          uploadProgressPic({
            file,
            progressPicId: progressPic.id,
            userId: currentUser.id,
            bodyPart,
          });
          return progressPic;
        });
      }
    ),
  },
};
Error:
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
========== UPDATE 2 =============
Even trying to 1) simplify it and 2) move createReadStream() outside of uploadProgressPic shows the same error:
const uploadProgressPic = async ({
  stream,
  progressPicId,
  userId,
  bodyPart,
  models,
}) => {
  try {
    console.log("Uploading to S3...");
    const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
      stream,
      folder: userId,
      filename: `${progressPicId}-${bodyPart}.jpg`,
    });
    if (url && key && bucket) {
      await models.ProgressPic.findOneAndUpdate(
        { _id: progressPicId },
        { $set: { url, key, bucket } },
        { new: true, useFindAndModify: false }
      );
      console.log("Done!");
    }
  } catch (error) {
    console.log("ERROR OCCURRED: ", error);
  }
};
export default {
  Mutation: {
    batchCreateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, bodyPart, localPath } = pic;
          const progressPic = await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
          const { createReadStream } = await file;
          const stream = createReadStream();
          uploadProgressPic({
            stream,
            progressPicId: progressPic.id,
            userId: currentUser.id,
            bodyPart,
            models,
          });
          return progressPic;
        });
      }
    ),
  },
};
Error:
Uploading to S3...
Uploading to S3...
Uploading to S3...
ERROR OCCURRED: BadRequestError: Request disconnected during file upload stream parsing.
at IncomingMessage.<anonymous> (/Users/edmundmai/Documents/src/acne-tracker/server/node_modules/graphql-upload/lib/processRequest.js:300:35)
at Object.onceWrapper (events.js:291:20)
at IncomingMessage.emit (events.js:203:13)
at IncomingMessage.EventEmitter.emit (domain.js:471:20)
at resOnFinish (_http_server.js:614:7)
at ServerResponse.emit (events.js:208:15)
at ServerResponse.EventEmitter.emit (domain.js:471:20)
at onFinish (_http_outgoing.js:649:10)
at onCorkedFinish (_stream_writable.js:678:5)
at afterWrite (_stream_writable.js:483:3)
at processTicksAndRejections (internal/process/task_queues.js:77:11) {
message: 'Request disconnected during file upload stream parsing.',
expose: true,
statusCode: 499,
status: 499
}
Done!
The funny thing is that I still see a few Done!s in the logs even though it complains.
Not an expert on the subject, but I have an idea that may work, and a theory:
IDEA: If you're dealing with a large number of images, your problem may originate from the await Promise.all(). I recommend using parallelLimit from the async library to limit how many functions execute at a time; otherwise you will run into performance problems (see the sketch below).
THEORY: Maybe you can free the memory allocated for each Buffer after use to avoid memory leaks and make your server more performant.
If I am wrong in any way, please correct me. I am myself interested in the outcome of this problem.
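For illustration, a minimal sketch of the parallelLimit idea (it assumes the async package from npm plus the pics array and Agenda.now call from the question; buildBufferFromStream is a placeholder for the stream-to-buffer code shown above):

const async = require("async");

// One task per picture; at most two of them run at the same time.
const tasks = pics.map((pic) => async () => {
  const buffer = await buildBufferFromStream(pic); // placeholder helper
  Agenda.now("uploadProgressPic", { buffer /* ...plus the other fields from the question */ });
});

async.parallelLimit(tasks, 2, (err) => {
  if (err) console.error(err);
});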
Don't await the Promise.
new Promise((resolve, reject) => {
  var buffers = [];
  stream.on("data", function(data) {
    buffers.push(data);
  });
  stream.on("end", function() {
    const everything = Buffer.concat(buffers);
    resolve(everything);
  });
  stream.on("error", function(e) {
    reject(e);
  });
}).then((buffer) => {
  Agenda.now("uploadProgressPic", {
    userId: currentUser.id,
    progressPicId: progressPic.id,
    filename: `${progressPic.id}-${bodyPart}.jpg`,
    buffer,
  });
}).catch((error) => {
  // Clean up here
});

return models.ProgressPic.create({
  bodyPart,
  user: currentUser.id,
  url: localPath,
});
This way, you'll kick off creating the buffer, but you won't actually wait for that code to finish; you'll immediately create the ProgressPic instance and return it. Because the call to Agenda.now requires the resolved value of the Promise, we stick it inside the then callback. Note that it's important to append a catch as well -- if you don't, you could end up with an unhandled rejection.
You may want to use the catch callback to log the error and do any additional cleanup. For example, you may want to delete the created ProgressPic (in which case, you should move the create call above the buffer Promise so you can reference the created instance).
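A possible shape for that catch block (a sketch, assuming models.ProgressPic is the Mongoose model from the question and progressPic was created before the buffer Promise was started):

.catch(async (error) => {
  // log the failure and remove the pic we optimistically created above
  console.log("Buffer/upload failed: ", error);
  await models.ProgressPic.deleteOne({ _id: progressPic.id });
});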
If you're like me and die a little bit on the inside each time you have to type then, you can extract all that logic into a separate function:
const uploadProgressPic = async (/* parameters omitted for brevity */) => {
  try {
    const buffer = await new Promise(...);
    Agenda.now(...);
  } catch (error) {
    // Do whatever
  }
};
and then call it inside your resolver, again, without awaiting it:
uploadProgressPic()
return models.ProgressPic.create({
  bodyPart,
  user: currentUser.id,
  url: localPath,
});
I tried a variety of things that ended up not working because creating the buffer was just too slow in production for some reason. My ultimate solution that actually works was to split up the upload into two requests:
Backend:
Request #1: Create a progress pic, using the local file path as the URL
Request #2: Upload the file and update the progress pic
import { combineResolvers } from "graphql-resolvers";
import { isAuthenticated } from "./authorization";
import S3 from "../services/s3";

export default {
  Query: {
    progressPics: combineResolvers(
      isAuthenticated,
      async (parent, args, { models, currentUser }) => {
        return await models.ProgressPic.find({ user: currentUser.id });
      }
    ),
  },
  Mutation: {
    createProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { bodyPart, localPath } = pic;
          return await models.ProgressPic.create({
            bodyPart,
            user: currentUser.id,
            url: localPath,
          });
        });
      }
    ),
    updateProgressPics: combineResolvers(
      isAuthenticated,
      async (parent, { pics }, { models, currentUser }) => {
        return pics.map(async (pic, i) => {
          const { file, filename, progressPicId } = pic;
          const { createReadStream } = await file;
          const stream = createReadStream();
          const { Location: url, Key: key, Bucket: bucket } = await S3.upload({
            stream,
            filename,
            folder: currentUser.id,
          });
          return await models.ProgressPic.findOneAndUpdate(
            { _id: progressPicId },
            { $set: { url, key, bucket } },
            { new: true, useFindAndModify: false }
          );
        });
      }
    ),
  },
};
The frontend will then wait for the response from Request #1, and send Request #2 but ignore the response so it can just return immediately.
const createAndUploadProgressPics = async photos => {
  const {
    data: { createProgressPics: progressPics },
  } = await createProgressPics({
    variables: {
      pics: photos.map((p, i) => ({
        bodyPart: BODY_PARTS[i],
        localPath: p.uri,
      })),
    },
  });

  updateProgressPics({
    variables: {
      pics: progressPics.map(({ id, bodyPart }, i) => {
        return {
          progressPicId: id,
          filename: `${id}-${bodyPart}.jpg`,
          file: photos[i],
        };
      }),
    },
  });

  onFinish(progressPics);
  navigation.goBack();
};
I would like to delete records from multiple tables one after another. Under a single router, how could I call the following destroy functions one after another?
function deleteFromTable1(param1, transaction) {
  return table1_model.destroy({
    where: { param1 },
    transaction
  });
}

function deleteFromTable2(param1, transaction) {
  return table2_model.destroy({
    where: { param1 },
    transaction
  });
}

function deleteFromTable3(param1, transaction) {
  return table3_model.destroy({
    where: { param1 },
    transaction
  });
}
When you say
Under a single router
I am assuming you have a setup similar to a Node.js and Express.js application where your route and handler look like the following:
app.post('/delete', function (req, res) {
  // delete some items from database
})
In this case, you could do the following:
function deleteItems(req, res) {
  // since Sequelize `destroy` calls return promises,
  // you can just wrap all of them in `Promise.all`
  // and "wait" for all of them to `resolve` (complete)
  const handleSuccessfulDeletion = () => {
    res.status(200).json({
      status: 200,
      message: 'OK'
    })
  }

  const handleError = (error) => {
    // ...
  }

  Promise.all([
    deleteFromTable1(param1, transaction),
    deleteFromTable2(param1, transaction),
    deleteFromTable3(param1, transaction)
  ])
    .then(handleSuccessfulDeletion)
    .catch(handleError)
}

app.post('/delete', deleteItems)
This way, all deletion calls get completed before you return the success response.
Alternatively, if your calls need to be sequential, you could do the following:
function deleteItems(req, res) {
  const handleSuccessfulDeletion = () => {
    // ...
  }

  const handleError = (error) => {
    // ...
  }

  deleteFromTable1(param1, transaction)
    .then(() => deleteFromTable2(param1, transaction))
    .then(() => deleteFromTable3(param1, transaction))
    .then(handleSuccessfulDeletion)
    .catch(handleError)
}

app.post('/delete', deleteItems)
Here's an example - jsbin.com
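If you prefer async/await over then chains, the sequential version could also look like this (a sketch, assuming the same param1 and a Sequelize transaction created inside the handler so it can be rolled back on failure):

async function deleteItems(req, res) {
  const transaction = await sequelize.transaction(); // assumes a sequelize instance is in scope
  try {
    await deleteFromTable1(param1, transaction);
    await deleteFromTable2(param1, transaction);
    await deleteFromTable3(param1, transaction);
    await transaction.commit();
    res.status(200).json({ status: 200, message: 'OK' });
  } catch (error) {
    await transaction.rollback();
    res.status(500).json({ status: 500, message: error.message });
  }
}

app.post('/delete', deleteItems)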