I have to build a JSON-RPC API that will have to support heavy traffic and manage a PostgreSQL database.
To do this I chose the built-in 'http' module for the server and pg-promise for the database.
My problem is that I have some difficulty understanding and using promises and async/await, so I'm not sure I did it correctly.
I put some code below
What I did
./server/server.js creates an http server with requestHandler() as the request handler. It does some checks and then calls the async requestProcessor() to execute the method.
The methods are defined in the repos (here a transaction in devices.js) as async and, in my example below, use await to wait for the required results.
Some questions:
Do I have to define as async only the methods that use await?
In my SystemRepository, do I need to define 'InsertOneSystem' as async?
How can I write a simple script to test the load (requests per second, etc.)?
Thanks in advance!
A bit of code
server.js
const http = require('http');
const Database = require('../db');
const path = '/api/v1', port = 9000;
const methods = Database.methods;
/* hidden for brevity */
function sendResponse(res, response) {
    if (response) {
        const responseStr = JSON.stringify(response);
        res.setHeader('Content-Type', 'application/json');
        res.setHeader('Content-Length', responseStr.length);
        res.write(responseStr);
    } else {
        /* hidden for brevity */
    }
    res.end();
}
const requestHandler = (req, res) => {
    /* some checks, hidden for brevity */
    const body = [];
    req.on('data', (chunk) => {
        body.push(chunk);
    }).on('end', () => {
        const bodyStr = Buffer.concat(body).toString();
        // parse the body as JSON
        let request = JSON.parse(bodyStr);
        requestProcessor(request).then((response) => {
            sendResponse(res, response);
        });
    });
}
async function requestProcessor(request) {
    let response = {
        id: request.id,
        jsonrpc: '2.0',
    };
    try {
        response.result = await Promise.resolve(methods[request.method](request.params));
    } catch (err) {
        /* hidden for brevity */
    }
    return response;
}
const server = http.createServer(requestHandler);
server.listen(port, (err) => { /* hidden for brevity */ });
devices.js
'use strict';
/* hidden for brevity */
async function InsertOne(params) {
    return Database.tx('Insert-New-Device', async function(transaction) {
        let system = null, disks = null, cpus = null;
        const query = pgp.helpers.insert(params.data.device, Collections.insert) + " RETURNING *";
        let device = await transaction.one(query);
        // if a system is present, insert it with the deviceId and return it
        if(params.data.system) {
            params.data.system.deviceid = device.deviceid;
            system = transaction.systems.InsertOne(params);
        }
        // same as system
        if(params.data.disks) {
            params.data.disks.deviceid = device.deviceid;
            disks = transaction.disks.InsertOne(params);
        }
        // same as system
        if(params.data.cpus) {
            params.data.cpus.deviceid = device.deviceid;
            cpus = transaction.cpus.InsertOne(params);
        }
        return {
            device: device,
            system: await system,
            disks: await disks,
            cpus: await cpus
        }
    })
    .then(data => {
        return data;
    })
    .catch(ex => {
        console.log(ex)
        throw new Error(ex);
    });
}
/* hidden for brevity */
const DevicesRepository = {
    InsertOne: InsertOne
};
module.exports = (db, pgpLib) => {
    /* hidden for brevity */
    return DevicesRepository;
}
systems.js
'use strict';
/* hidden for brevity */
async function InsertOneSystem(params) {
    var system = params.data.system;
    system.archid = 2;
    system.distributionid = 3;
    var query = pgp.helpers.insert(system, Collections.insert);
    if(params.return) query += " RETURNING *";
    return Database.one(query)
        .then(data => {
            return data;
        })
        .catch(ex => {
            throw new Error(ex);
        });
}
/* hidden for brevity */
const SystemsRepository = {
    InsertOne: InsertOneSystem
};
module.exports = (db, pgpLib) => {
    /* hidden for brevity */
    return SystemsRepository;
}
Do I have to define as async only the methods that use await?
Have to - yes. But you should use async on all methods that return a promise; it's just a nice coding style, especially in TypeScript.
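For illustration, both of the functions below can be awaited by a caller; async only makes the promise wrapping explicit (a minimal sketch, assuming db is a pg-promise database object):
async function getOneAsync(db) {
    return db.one('SELECT 1 AS one'); // return value is implicitly wrapped in a promise
}
function getOnePlain(db) {
    return db.one('SELECT 1 AS one'); // already returns a promise, async not required
}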
In my SystemRepository, do I need to define InsertOneSystem as async?
You don't have to, but as above, it's a good coding style ;)
How can I write a simple script to test the load (requests per second, etc.)?
I'm not answering that in depth here, as load testing is a whole separate area that deserves a separate question. You should investigate yourself how to load-test HTTP services.
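As a bare-bones starting point, though, a script like the one below can give a rough requests-per-second figure (a sketch only; it assumes the server above on localhost:9000, and the method name in the payload is a placeholder - dedicated tools like autocannon, wrk or k6 do this much better):
// load-test.js - rough sketch, assumes the JSON-RPC server above on localhost:9000
const http = require('http');

const TOTAL = 1000, CONCURRENCY = 20;
// 'InsertOne' is a placeholder method name - replace it with one of your real methods
const payload = JSON.stringify({ jsonrpc: '2.0', id: 1, method: 'InsertOne', params: {} });

let sent = 0, done = 0;
const start = Date.now();

function finish() {
    if (++done === TOTAL) {
        const secs = (Date.now() - start) / 1000;
        console.log(`${TOTAL} requests in ${secs}s -> ${(TOTAL / secs).toFixed(1)} req/s`);
    } else {
        fire();
    }
}

function fire() {
    if (sent >= TOTAL) return;
    sent++;
    const req = http.request({
        host: 'localhost', port: 9000, path: '/api/v1', method: 'POST',
        headers: { 'Content-Type': 'application/json', 'Content-Length': Buffer.byteLength(payload) }
    }, res => {
        res.resume(); // drain the body, we only care about completion
        res.on('end', finish);
    });
    req.on('error', finish);
    req.end(payload);
}

for (let i = 0; i < CONCURRENCY; i++) fire();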
A little code improvement, as you have plenty of redundancies:
async function InsertOne(params) {
    return Database.tx('Insert-New-Device', async t => {
        let system = null, disks = null, cpus = null;
        const query = pgp.helpers.insert(params.data.device, Collections.insert) + " RETURNING *";
        let device = await t.one(query);
        // if a system is present, insert it with the deviceId and return it
        if(params.data.system) {
            params.data.system.deviceid = device.deviceid;
            system = await t.systems.InsertOne(params);
        }
        // same as system
        if(params.data.disks) {
            params.data.disks.deviceid = device.deviceid;
            disks = await t.disks.InsertOne(params);
        }
        // same as system
        if(params.data.cpus) {
            params.data.cpus.deviceid = device.deviceid;
            cpus = await t.cpus.InsertOne(params);
        }
        return {device, system, disks, cpus};
    })
    .catch(ex => {
        console.log(ex); // better to use "pg-monitor", or handle events globally
        throw ex;
    });
}
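On the pg-monitor remark: attaching it to the same initialization options you pass to pg-promise reports queries, transactions and errors globally, so the repos don't need their own console.log (a sketch, assuming a single shared initOptions object):
// attach pg-monitor once, at startup, to the pg-promise initialization options
const initOptions = { /* your pg-promise initialization options */ };
const pgp = require('pg-promise')(initOptions);
const monitor = require('pg-monitor');
monitor.attach(initOptions); // errors and queries are now reported once, globally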
Related
I'm a rookie with async/await, but now I must use it for Redis-om. NN_walkd walks through a Redis database looking for loop-chains, and does this by recursion. The 2 questions I have are:
Am I calling the inner recursive NN_walkd calls correctly via async/await?
At runtime, the compSearchM proc is called first and seems to work (it gets 5 entries, so it has to call NN_walkd 5 times). NN_walkd is then called recursively, and when it loops the first time it calls compSearchK, where the problems are. It seems to sit on the first Redis call in compSearchK (.search). Yet the code for compSearchK and compSearchM look basically identical.
main call
NN_walk = async function(req, db, cnode, pnode, chain, cb) {
    var vegas, sneaker;
    req.session.walk = [];
    await NN_walkd(req, cnode, pnode, [], 1);
    req.session.walk = null;
    console.log('~~~~~~~~~~~~ Out of Walk ~~~~~~~~~~~~~~~');
    cb();
};
redis.mjs
export class RedisDB {
    constructor() {
        ...
        this._companyRepo = ...
    }
    compSearchK(ckey) { // doesn't matter if I have async or not here
        return new Promise(async (resolve) => {
            const sckey = await this._companyRepo.search()
                .where('COMPANYKEY').equals(ckey)
                .return.all();
            if (sckey.length) {
                const ttrr = await this._companyRepo.fetch(sckey[0].entityId);
                resolve(ttrr.toJSON());
            } else
                resolve(null);
        });
    }
    compSearchM(mkey) {
        var tArr = [];
        return new Promise(async (resolve) => {
            const smkey = await this._companyRepo.search()
                .where('MASTERKEY').equals(mkey)
                .and('TBLNUM').equals(10)
                .return.all();
            if (smkey.length) {
                for (var spot in smkey) {
                    const ttrr = await this._companyRepo.fetch(smkey[spot].entityId);
                    tArr.push(ttrr.toJSON());
                }
                resolve(tArr);
            } else {
                resolve(null);
            }
        });
    }
}
walk.js
NN_walkd = async function(req, cnode, pnode, chain, lvl) {
    ...
    if (cnode[1]) {
        const sObj = await req.app.get('redis').compSearchK(cnode[1]);
        if (sObj) {
            int1 = (sObj.TBLNUM == 1) ? null : sObj.CLIENTKEY;
            (async () => await NN_walkd(req, [sObj.COMPANYKEY, int1], cnode, Array.from(chain), tlvl))()
        }
    } else {
        const sArr = await req.app.get('redis').compSearchM(cnode[0]);
        if (sArr.length) {
            for (sneaker in sArr) {
                (async () => await NN_walkd(req, [sArr[sneaker].COMPANYKEY, sArr[sneaker].CLIENTKEY], cnode, Array.from(chain), tlvl))()
            }
        } else {
            console.log('no more links on this chain: ', cnode);
        }
    }
}
"doesn't matter if i have async or not here"
compSearchK(ckey) { // doesn't matter if I have async or not here
    return new Promise(async (resolve) => {
        const sckey = await this._companyRepo.search()
            .where('COMPANYKEY').equals(ckey)
            .return.all();
        if (sckey.length) {
            const ttrr = await this._companyRepo.fetch(sckey[0].entityId);
            resolve(ttrr.toJSON());
        } else
            resolve(null);
    });
}
Of course it doesn't matter, because you're not using await directly in the body of compSearchK - only inside the promise executor!
You are using the explicit promise constructor anti-pattern. You should avoid it, as it demonstrates a lack of understanding. Here is compSearchK rewritten without the anti-pattern -
async compSearchK(ckey) {
    // await key
    const sckey =
        await this._companyRepo.search()
            .where('COMPANYKEY').equals(ckey)
            .return.all();
    // return null if key is not found
    if (sckey.length == 0) return null;
    // otherwise get ttrr
    const ttrr = await this._companyRepo.fetch(sckey[0].entityId);
    // return ttrr as json
    return ttrr.toJSON();
}
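compSearchM has the same anti-pattern and can be rewritten the same way (an untested sketch, assembled from the original method above):
async compSearchM(mkey) {
    // await the search for all entries matching the master key
    const smkey =
        await this._companyRepo.search()
            .where('MASTERKEY').equals(mkey)
            .and('TBLNUM').equals(10)
            .return.all();
    if (smkey.length == 0) return null;
    // fetch each match sequentially and collect the JSON results
    const tArr = [];
    for (const spot in smkey) {
        const ttrr = await this._companyRepo.fetch(smkey[spot].entityId);
        tArr.push(ttrr.toJSON());
    }
    return tArr;
}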
Multiple users can send requests at the same time, and I want the following: if one user sends a request and the calculation of the results has started, then when another user sends the same request, the calculation is not started again; instead, it waits for the results requested by the first user. In other words, the calculation should only start if it is not 'locked' by another user; if it is locked, the request waits for the result.
To guarantee that the result is not calculated multiple times for concurrent requests, you need to implement some kind of locking mechanism (as you expected).
Here's a very basic example of what your code could look like: it simply pushes requests onto a queue if the mutex is currently locked. If not, the result gets calculated, and a response with the calculated value is sent for all pending requests:
const express = require('express');
const app = express();

class Mutex {
    constructor() {
        this.queue = [];
        this.locked = false;
    }
    lock() {
        return new Promise((resolve, reject) => {
            if (this.locked) {
                this.queue.push([resolve, reject]);
            } else {
                this.locked = true;
                resolve();
            }
        });
    }
    release(value) {
        if (this.queue.length > 0) {
            const [resolve, reject] = this.queue.shift();
            resolve(value);
        } else {
            this.locked = false;
        }
    }
}

const getResultsMutex = new Mutex();

function getResults() {
    return new Promise((resolve) => {
        setTimeout(() => resolve(Math.random()), 5000);
    })
}

function sendResponse(result, req, res) {
    res.send("The result is " + result);
}

app.get('/getresults', async (req, res) => {
    let result = await getResultsMutex.lock();
    if (!result) {
        result = await getResults();
    }
    sendResponse(result, req, res);
    getResultsMutex.release(result);
});

app.listen(4000, function () {
    console.log("Server is running at port 4000");
});
Like this?
/** @type {[express.Request, express.Response][]} */
let requestQueue = [];

/**
 * @param {express.Request} req
 * @param {express.Response} res
 */
async function processRequest(req, res) {
    var result = await GetResults();
    res.end(result);
    if (requestQueue.length !== 0) processRequest(...requestQueue.shift());
}

app.get("/getresults", (req, res) => {
    if (requestQueue.length !== 0) return requestQueue.push([req, res]);
    processRequest(req, res);
});
EDIT: If they should all receive the same result, then this code:
/** @type {Promise} */
let requestPromise = null;

app.get("/getresults", async (req, res) => {
    if (requestPromise === null) requestPromise = GetResults().finally(() => requestPromise = null); // returns a promise
    res.send(await requestPromise);
});
I am performing the useMutation operation in the innermost loop and want to check the remaining cost after every mutation. But the check runs after all the mutations, which is a problem, because even when all the mutations get done (when the cost is under the limits), it still calls the .then() part for cost-checking and waits for an unknown reason.
Edit: I also noticed that even though the program is waiting again and again, the network tab in Chrome shows that all the mutations have happened and only the query of handleDiscountMore, i.e. fetchMore, is pending.
const { loading, error, data, fetchMore, extensions, refetch } = useQuery(GET_COLLECTION, {
    variables: { "id": coll.collection.id }
});
const [updatePrice] = useMutation(UPDATE_PRICE);

const redirectToModify = async (data, totalProducts) => {
    wait(20000);
    var cursor, fetchCount;
    fetchCount = data.collection.products.edges.length;
    totalProducts -= fetchCount;
    data.collection.products.edges.map(async (product) => {
        const results = await Promise.all(product.node.variants.edges.map(variant => {
            if (selected == 'curr_price') {
                //do stuff
            }
            else {
                //do stuff
            }
            const productVariableInput = {
                //Object
            };
            updatePrice({ variables: { input: productVariableInput } }).then(({ data, extensions }) => {
                console.log("Remaining", extensions.cost.throttleStatus.currentlyAvailable)
                console.log(data)
                if (extensions.cost.throttleStatus.currentlyAvailable < 100) {
                    console.log("WAITING")
                    wait(18000);
                }
            }).catch(e => {
                console.log(e)
            })
            console.log("AFTER")
            return 0;
        }))
    })
    if (totalProducts > 0) {
        console.log("Calling")
        wait(15000);
        handleDiscountMore(data, cursor, totalProducts)
    }
};

// The function below is just for reference. It gets called before checking the throttleStatus above. AFAIK there's no problem with this.
const handleDiscountMore = (data, cursor, pc) => {
    console.log("Call received")
    fetchMore({
        variables: {
            "id": data.collection.id,
            "cursor": cursor
        },
        updateQuery: (
            previousResult,
            { fetchMoreResult }
        ) => {
            console.log("adding", fetchMoreResult);
            redirectToModify(fetchMoreResult, pc);
            // return fetchMoreResult;
        }
    })
}
Your map of maps is evaluating all promises at exactly the same time. Here's a cleaned-up example that uses a nested for loop instead, which will wait for each request to finish before starting the next (note: I couldn't run it to test, so there are probably some bugs, but the idea is there):
const id = coll.collection.id;
const { loading, error, data, fetchMore, extensions, refetch } = useQuery(GET_COLLECTION, {
    variables: { id }
});
const [updatePrice] = useMutation(UPDATE_PRICE);

// Given a product, returns a promise that resolves when all variants are processed
async function process_product(product) {
    const variants = product.node.variants.edges;
    for (let i = 0; i < variants.length; i++) {
        await process_variant(variants[i]);
    }
}

// Given a variant, returns a promise after the variant is processed
async function process_variant(variant) {
    if (variant) {
        console.log('doing stuff')
    }
    else {
        console.log('doing other stuff')
    }
    const productVariableInput = {};
    const variables = { input: productVariableInput };
    try {
        const { data, extensions } = await updatePrice({ variables });
        const remaining_throttle = extensions.cost.throttleStatus.currentlyAvailable;
        console.log("Remaining", remaining_throttle)
        console.log(data)
        // Change to a while loop to make sure you actually wait until resources are available
        if (remaining_throttle < 100) {
            console.log("WAITING")
            await wait(18000);
        }
    } catch (e) {
        console.log('error:', e);
    }
    console.log("AFTER")
    return 0;
}

const redirectToModify = async (data, totalProducts) => {
    await wait(20000);
    let cursor;
    const products = data.collection.products.edges;
    totalProducts = totalProducts - products.length;
    // Wait for all products to finish processing
    for (var i = 0; i < products.length; i++) {
        await process_product(products[i]);
    }
    if (totalProducts > 0) {
        console.log("Calling")
        await wait(15000);
        handleDiscountMore(data, cursor, totalProducts)
    }
};

// The function below is just for reference. It gets called before checking the throttleStatus above. AFAIK there's no problem with this.
// updateQuery is defined inline so that it can close over pc.
function handleDiscountMore(data, cursor, pc) {
    console.log("Call received")
    const variables = { id: data.collection.id, cursor };
    fetchMore({
        variables,
        updateQuery: (previousResult, { fetchMoreResult }) => {
            console.log("adding", fetchMoreResult);
            redirectToModify(fetchMoreResult, pc);
            return fetchMoreResult;
        }
    })
}
I am trying to run a series of tasks. Each task is dynamic and could have different rules to follow. This will be executed on AWS Lambda.
I have an array of JSON records. Each has a body with the task name in it, and it also has attributes.
I need to dynamically load a JavaScript file with the name inside the body.
I need to wait until everything inside that task is finished, or until it fails (regardless of where). If a failure happens, I need to write that data into the current record inside the forEach loop.
I have the old issue where my forEach finishes first, without waiting for the tasks to complete.
This is the forEach loop:
const jobLoader = require('./Helpers/jobLoader');
event.Records.forEach(record => {
    const { body: jobName } = record;
    const { messageAttributes } = record;
    const job = jobLoader.loadJob(jobName);
    job.runJob(messageAttributes).then(res => {
        console.log('Show results');
        return; // resume another record from forEach
    }).catch(err => {
        record.failed = true;
        record.failureMessage = err.message;
        console.log('I errored');
    });
    console.log('All Done');
});
The problem is that the message 'All Done' is printed first, and then the message 'Show results' is printed. I do get the results from the database once the query executes.
This is the file that loads a task:
exports.loadJob = (jobName) => {
    const job = require(`../Tasks/${jobName}`);
    return job;
};
This is the file that contains actual task:
const mySqlConnector = require('../Storage/mySql');

exports.runJob = async (params) => {
    let payload = {};
    let dataToSend = await getUserName(params.userId.stringValue);
    payload.dataToSend = dataToSend;
    let moreDataToSend = await getEvenMoreData(params.userId.stringValue);
    payload.moreDataToSend = moreDataToSend;
    return await sendData(payload);
};

const getUserName = async (userId) => {
    const query = 'SELECT * FROM user_data';
    return await mySqlConnector.handler(query);
};

const getEvenMoreData = async (userId) => {
    const query = 'SELECT * FROM user_data';
    return await mySqlConnector.handler(query);
};

const sendData = (payload) => {
    //this should be Axios sending data
};
And this is the mySql connector itself:
const mysql = require('promise-mysql');

exports.handler = async (query) => {
    return mysql.createConnection({
        host: '127.0.0.1',
        user: 'root',
        password: '',
        database: 'crm'
    }).then(conn => {
        let result = conn.query(query);
        conn.end();
        return result;
    }).then(rows => {
        //console.log("These are rows:" + rows);
        return rows;
    }).catch(error => {
        return error;
    });
};
The task file can contain any number of things it needs to complete, which will differ as I start adding tasks.
I need job.runJob to complete, or to catch an error from wherever it originated, so I can continue with the forEach.
I have tried using map and whatnot, but the end result is always the same.
What am I doing wrong?
You can use the Promise.all method:
const promises = event.Records.map(record => {
    const { body: jobName } = record;
    const { messageAttributes } = record;
    const job = jobLoader.loadJob(jobName);
    return job.runJob(messageAttributes).then(res => {
        console.log('Show results', res);
    }).catch(err => {
        record.failed = true;
        record.failureMessage = err.message;
        console.log('I errored');
        throw new Error('Your error !');
    });
});

try {
    const results = await Promise.all(promises);
    console.log('All done');
} catch (e) {
    console.log('Something has an error', e);
}
Don't forget to make your enclosing function async!
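For example, on AWS Lambda that means making the exported handler itself async (a sketch wrapping the code above; the handler signature is the standard Lambda convention, not from the question):
const jobLoader = require('./Helpers/jobLoader');

exports.handler = async (event) => {
    const promises = event.Records.map(record => {
        const { body: jobName, messageAttributes } = record;
        const job = jobLoader.loadJob(jobName);
        return job.runJob(messageAttributes)
            .then(res => console.log('Show results', res))
            .catch(err => {
                record.failed = true;
                record.failureMessage = err.message;
                throw err;
            });
    });
    try {
        await Promise.all(promises);
        console.log('All done');
    } catch (e) {
        console.log('Something has an error', e);
    }
    return event.Records; // records now carry any failed/failureMessage flags
};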
I managed to solve it, and still keep details about the execution:
Something like this:
for (let prop in event.Records) {
    const { body: jobName } = event.Records[prop];
    const { messageAttributes } = event.Records[prop];
    const job = jobLoader.loadJob(jobName);
    await job.runJob(messageAttributes).then(res => {
        console.log('Show results', res);
    }).catch(err => {
        event.Records[prop].failed = true;
        event.Records[prop].failureMessage = err.message;
        console.log('I errored');
    });
}
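The same loop reads a little more cleanly with for...of and a try/catch, instead of mixing await with .then/.catch (an equivalent sketch):
for (const record of event.Records) {
    const { body: jobName, messageAttributes } = record;
    const job = jobLoader.loadJob(jobName);
    try {
        const res = await job.runJob(messageAttributes);
        console.log('Show results', res);
    } catch (err) {
        // record the failure and move on to the next record
        record.failed = true;
        record.failureMessage = err.message;
        console.log('I errored');
    }
}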
Background
I am using koa2 with some middlewares to build a basic API framework. But when I use ctx.body to send the response in my router, the client side always receives "Not Found".
My code
./app.js
const Koa = require('koa');
const app = new Koa();
const config = require('./config');

//Middlewares
const loggerAsync = require('./middleware/logger-async')
const bodyParser = require('koa-bodyparser')
const jsonp = require('koa-jsonp')

app.use(loggerAsync())
app.use(bodyParser())
app.use(jsonp());

//Router
const gateway = require('./router/gateway')
app.use(gateway.routes(), gateway.allowedMethods());

app.use(async (ctx, next) => {
    await next();
    ctx.response.body = {
        success: false,
        code: config.code_system,
        message: 'wrong path'
    }
});

app.listen(3000);
./router/gateway.js
/**
 * Created by Administrator on 2017/4/11.
 */
const Router = require('koa-router');
const gateway = new Router();
const df = require('../db/data-fetcher');
const config = require('../config');
const moment = require('moment');
const log4js = require('log4js');
// log4js.configure({
//     appenders: { cheese: { type: 'file', filename: 'cheese.log' } },
//     categories: { default: { appenders: ['cheese'], level: 'error' } }
// });
const logger = log4js.getLogger('cheese');
logger.setLevel('ERROR');

gateway.get('/gateway', async (ctx, next) => {
    let time = ctx.query.time;
    if (!time) {
        ctx.body = {
            success: false,
            code: config.code_system,
            message: 'Please input running times'
        }
    } else {
        try {
            let r = await df(`insert into gateway (g_time, g_result, g_date) values (${time}, '', now())`);
            return ctx.body = {
                success: true,
                code: config.code_success
            }
        } catch (error) {
            logger.error(error.message);
        }
    }
});

module.exports = gateway;
Then a db wrapper (MySQL):
./db/async-db.js
const mysql = require('mysql');
const config = require('../config');

const pool = mysql.createPool({
    host: config.database.HOST,
    user: config.database.USERNAME,
    password: config.database.PASSWORD,
    database: config.database.DATABASE
})

let query = (sql, values) => {
    return new Promise((resolve, reject) => {
        pool.getConnection(function (err, connection) {
            if (err) {
                reject(err)
            } else {
                connection.query(sql, values, (err, rows) => {
                    if (err) {
                        reject(err)
                    } else {
                        resolve(rows)
                    }
                    connection.release()
                })
            }
        })
    })
}

module.exports = query
./db/data-fetcher.js
const query = require('./async-db')

async function performQuery(sql) {
    let dataList = await query(sql)
    return dataList
}

module.exports = performQuery;
My running result
When I launch the server on port 3000 and access it via http://localhost:3000/gateway?time=5, it always returns "Not Found". But as you can see, I have already used
return ctx.body = {
    success: true,
    code: config.code_success
}
to send the response. I debugged and found that the database processing went fine; the new data was inserted correctly.
When I remove this db insert line, it works well and returns the success info:
let r = await df(`insert into gateway (g_time, g_result, g_date) values (${time}, '', now())`);
Is there anything wrong?
Thanks a lot!
Update 2017/04/27
Now I have found the problem. It's due to my custom middleware:
const loggerAsync = require('./middleware/logger-async')
The code is as follows -
function log(ctx) {
    console.log(ctx.method, ctx.header.host + ctx.url)
}

module.exports = function () {
    return function (ctx, next) {
        return new Promise((resolve, reject) => {
            // perform the middleware's work
            log(ctx)
            resolve()
            return next()
        }).catch((err) => {
            return next()
        })
    }
}
I changed it to the async/await style and then everything worked well.
Could anyone please tell me what's wrong with this middleware?
I guess your problem is in the ./db/data-fetcher.js function. When you call
let r = await df(`insert ....`)
your df function should return a promise.
So try to rewrite your ./db/data-fetcher.js like this (not tested):
const query = require('./async-db')

function performQuery(sql) {
    return new Promise((resolve, reject) => {
        query(sql).then(
            result => {
                resolve(result)
            }
        )
    })
}

module.exports = performQuery;
Hope that helps.
Correct middleware:
function log(ctx) {
    console.log(ctx.method, ctx.header.host + ctx.url)
}

module.exports = function () {
    return function (ctx, next) {
        log(ctx);
        return next()
    }
}
Reason: once resolve() was called, the promise chain was complete and the response was sent to the client. The remaining middleware still ran, but the response had already gone - hence the classic node/koa error "Can't set headers after they are sent".
It seems that if you want to use a plain function as middleware, you have to return the next() call.
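For reference, the async/await version that the question author said fixed the issue would look like this (an equivalent sketch):
module.exports = function () {
    return async function (ctx, next) {
        // log first, then explicitly wait for the downstream middleware to finish
        log(ctx);
        await next();
    }
}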