I am trying to use await in an async function. However, I am getting the error
SyntaxError: await is only valid in async function
I can't figure out where the problem is in the code below.
insertPayrollAllowance: async function(companyId, jobId, basicSalary, employeeId, payrollId) {
return new Promise((resolve, reject) => {
sqlQuery = "SELECT * from tblJobAllowances Where tblJobAllowances.companyId = '" + companyId + "' and tblJobAllowances.jobId = '" + jobId + "' and deleterec = 0 order by id"
db.executeQuery(sqlQuery, null, (err, result) => {
if (err) {
reject(err);
} else {
for (var i = 0; i < result.length; i++) {
var insStatus = false;
var allowanceId = result[i].allowanceId
var allowanceName = await this.getAllowanceName(companyId, allowanceId);
var amountType = result[i].typeOfAmount;
var amount = result[i].amount;
var amountCredited;
var taxable = result[i].taxable;
if ($.trim(amountType) == "FIXED AMT") {
amountCredited = parseFloat(amount.toString().replace(/,/g, ''));
} else if ($.trim(amountType) == "% Of Income") {
amountCredited = (parseFloat(basicSalary.toString().replace(/,/g, '')) / 100) * parseFloat(amount.toString().replace(/,/g, ''));
};
var info = {
companyId: companyId,
employeeId: employeeId,
payrollId: payrollId,
allowanceName: allowanceName,
amountType: amountType,
amount: amount,
amountCredited: amountCredited,
taxable: taxable
}
sqlQuery = "INSERT INTO tblPayrollAllowanceDetails SET ?"
db.executeQuery(sqlQuery, info, (err, result) => {
if (err) {
insStatus = false;
} else {
insStatus = true;
}
});
};
resolve(insStatus);
}
});
});
}
I would be glad if anyone could point out why I'm getting this error. Any assistance would be highly appreciated.
The await is inside another function that's not async
db.executeQuery(sqlQuery, null, (err, result) => {
It's inside this arrow function (err, result) => {
so to fix it, just add async, like so:
db.executeQuery(sqlQuery, null, async (err, result) => {
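For reference, a fuller rewrite of the whole method with async/await might look like the sketch below. It is only a sketch: it assumes db.executeQuery(sql, params, cb) follows the usual (err, result) callback convention and wraps it in a hypothetical promisedQuery helper, and String.prototype.trim stands in for jQuery's $.trim.
// Hypothetical helper: wraps the callback-style db.executeQuery in a promise.
function promisedQuery(sqlQuery, params) {
    return new Promise((resolve, reject) => {
        db.executeQuery(sqlQuery, params, (err, result) => {
            if (err) return reject(err);
            resolve(result);
        });
    });
}
insertPayrollAllowance: async function (companyId, jobId, basicSalary, employeeId, payrollId) {
    const selectSql = "SELECT * FROM tblJobAllowances WHERE tblJobAllowances.companyId = '" + companyId +
        "' AND tblJobAllowances.jobId = '" + jobId + "' AND deleterec = 0 ORDER BY id";
    const rows = await promisedQuery(selectSql, null);
    let insStatus = false;
    for (const row of rows) {
        const allowanceName = await this.getAllowanceName(companyId, row.allowanceId);
        let amountCredited;
        if (row.typeOfAmount.trim() === "FIXED AMT") {
            amountCredited = parseFloat(row.amount.toString().replace(/,/g, ''));
        } else if (row.typeOfAmount.trim() === "% Of Income") {
            amountCredited = (parseFloat(basicSalary.toString().replace(/,/g, '')) / 100) *
                parseFloat(row.amount.toString().replace(/,/g, ''));
        }
        const info = {
            companyId: companyId,
            employeeId: employeeId,
            payrollId: payrollId,
            allowanceName: allowanceName,
            amountType: row.typeOfAmount,
            amount: row.amount,
            amountCredited: amountCredited,
            taxable: row.taxable
        };
        // Awaiting each insert; a failed insert rejects here instead of silently flipping insStatus.
        await promisedQuery("INSERT INTO tblPayrollAllowanceDetails SET ?", info);
        insStatus = true;
    }
    return insStatus;
}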
Related
I have a function like this in app.js that connects to SQL Server and returns a recordset, but after I have run the function about 11 times it stops working.
var config = require('./dbconfig');
const sql = require('mssql');
//create a get product function form the database
async function getProducts(skus, callback) {
let pool;
try {
let result = null;
let pool = await sql.connect(config);
let ps = new sql.PreparedStatement(pool);
// Construct an object of parameters, using arbitrary keys
let skuArray = skus.split(",");
console.log(skuArray);
let paramsObj = skuArray.reduce((obj, val, idx) => {
//trim val single quote
val = val.replace(/'/g, "");
obj[`id${idx}`] = val;
ps.input(`id${idx}`, sql.VarChar(200));
return obj;
}, {});
// Manually insert the params' arbitrary keys into the statement
let stmt = 'select a.area, a.article, a.total_qty, b.Avg_Qty ' +
'FROM [DB].[dbo].[Items] a join [brand].[dbo].[Sales] b on a.article = b.article ' +
'where a.Article in (' + Object.keys(paramsObj).map((o) => {return '@'+o}).join(',') + ')';
ps.prepare(stmt, function(err) {
if (err) {
let response = {"message": "failed","data": []};
return callback(response);
} else {
ps.execute(paramsObj, function(err, data) {
let response;
if (err) {
response = {"message": "failed","data": []};
} else {
let result = data.recordset;
let groupedResult = result.reduce((groupedData, product) => {
let article = product.article;
if (!groupedData[article]) {
groupedData[article] = [];
}
groupedData[article].push(product);
return groupedData;
}, {});
response = {"message": "success","data": groupedResult};
}
return callback(response);
ps.unprepare(function(err) {
if (err) {
console.log(err);
}
sql.close();
pool.close();
});
});
}
});
} catch (error) {
console.log(error);
} finally {
if (pool) {
sql.close();
pool.close();
}
}
}
dbconfig.js
var config = {
user: 'example',
password: 'support#example.com',
server: 'exampledb', // You can use 'localhost\\instance' to connect to named instance
//domain:"example.com",
database: 'example',
options: {
trustServerCertificate: true,
},
max: 20, min: 0, idleTimeoutMillis: 30000
}
//export config
module.exports = config;
Is this related to a database connection issue? Does anyone know what the problem is?
I've fixed it. The inner let pool = await sql.connect(config) was shadowing the outer pool variable, so the finally block never actually closed the pool and connections leaked on every call. (Note also that mssql normally expects pool options such as max to be nested under a pool key in the config; at the top level they are likely ignored, leaving the default pool size of 10, which would explain why the 11th call hangs.)
async function getProducts(skus, callback) {
let pool;
try {
let result = null;
pool = await sql.connect(config);
let ps = new sql.PreparedStatement(pool);
// Construct an object of parameters, using arbitrary keys
let skuArray = skus.split(",");
console.log(skuArray);
let paramsObj = skuArray.reduce((obj, val, idx) => {
//trim val single quote
val = val.replace(/'/g, "");
obj[`id${idx}`] = val;
ps.input(`id${idx}`, sql.VarChar(200));
return obj;
}, {});
// Manually insert the params' arbitrary keys into the statement
let stmt = 'select a.area, a.article, a.total_qty, b.Avg_Qty ' +
'FROM [DB].[dbo].[Items] a join [brand].[dbo].[Sales] b on a.article = b.article ' +
'where a.Article in (' + Object.keys(paramsObj).map((o) => {return '@'+o}).join(',') + ')';
await ps.prepare(stmt);
let data = await ps.execute(paramsObj);
let response;
if (data && data.recordset) {
let result = data.recordset;
let groupedResult = result.reduce((groupedData, product) => {
let article = product.article;
if (!groupedData[article]) {
groupedData[article] = [];
}
groupedData[article].push(product);
return groupedData;
}, {});
response = {"message": "success","data": groupedResult};
} else {
response = {"message": "failed","data": []};
}
await ps.unprepare();
callback(response);
} catch (error) {
console.log(error);
let response = {"message": "failed","data": []};
callback(response);
} finally {
if (pool) {
pool.close();
}
sql.close();
}
}
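As a side note (not part of the fix above), the mssql documentation recommends creating the pool once and reusing it rather than connecting and closing on every call. A minimal sketch of that pattern, returning the recordset directly instead of taking a callback, might look like this:
const sql = require('mssql');
const config = require('./dbconfig');
// Create the pool once at module load and reuse it for every call (sketch only).
const poolPromise = new sql.ConnectionPool(config).connect();
async function getProducts(skus) {
    const pool = await poolPromise;
    const request = pool.request();
    const skuArray = skus.split(",").map(s => s.replace(/'/g, ""));
    // Bind each SKU as a named parameter instead of preparing a statement.
    skuArray.forEach((val, idx) => request.input(`id${idx}`, sql.VarChar(200), val));
    const stmt = 'select a.area, a.article, a.total_qty, b.Avg_Qty ' +
        'FROM [DB].[dbo].[Items] a join [brand].[dbo].[Sales] b on a.article = b.article ' +
        'where a.Article in (' + skuArray.map((_, idx) => `@id${idx}`).join(',') + ')';
    const data = await request.query(stmt);
    return data.recordset;
}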
I want to write a script that divides the lines read from a file into batches of 25, but unfortunately my sample batch ends up with 40 codes. I would like it to split them into batches of 25 items: for example, with 60 codes it should create two batches of 25 and one with 10 codes. Unfortunately, I can't get it to work.
const fs = require('fs');
fs.readFile('code.txt', function (err, data) {
if (err) throw err;
const array = data.toString().split("\n");
let count = 0;
let items = [];
for (let i in array) {
items.push({
PutRequest: {
Item: {
code: array[i]
}
}
});
let params = {
RequestItems: {
'TABLE_NAME': items
}
};
if (count === 25) {
dynamoDB.batchWrite(params, function (err, data) {
if (err) {
console.log(err);
} else {
count = 0;
items = [];
}
});
}else{
count++;
}
}
});
code.txt content
https://0bin.net/paste/NA8-4hkq#1Ohwt5uUkQqE0YscwnxTX2gxEqlvAUVKp1JRipBCsZg
Any idea what I do wrong?
Your dynamoDB.batchWrite() is asynchronous. Thus its callback is executed only after the loop has completed. So items and count are never reset ...
The easiest fix would be to switch to a promise-based approach like the following:
const BATCHSIZE = 25;
const fs = require('fs').promises;
async function batchLoad() {
const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
while (lines.length > 0) {
const items = lines.splice(0, BATCHSIZE).map(l => ({PutRequest: {Item: { code: l }}}));
const params = { RequestItems: { TABLE_NAME: items}};
await new Promise((resolve, reject) => {
dynamoDb.batchWrite(params, (err) => {
if (err) return reject(err);
resolve();
});
});
}
}
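For completeness, batchLoad could be invoked like this (assuming dynamoDb in the snippet above is an already-initialized AWS DynamoDB DocumentClient):
batchLoad()
    .then(() => console.log("all batches written"))
    .catch(err => console.error("batch write failed:", err));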
A callback-based approach could look like this:
const BATCHSIZE = 25;
fs.readFile("code.txt", "utf-8", (err, data) => {
const lines = data.split("\n");
function writeBatch() {
if (!lines.length) return;
const items = lines.splice(0, BATCHSIZE).map(l => ({PutRequest: {Item: { code: l }}}));
const params = { RequestItems: { TABLE_NAME: items}};
dynamoDb.batchWrite(params, err => {
if (err) ...
else writeBatch();
});
}
writeBatch();
});
The function writeBatch takes a certain number of lines from your original array and writes them into the database. Only afer the write into the DB was successful, it recursively calls itself and handles the next batch. But be aware, that this approach may exceed the maximum call stack size and throw an error.
You can also make either of these approaches avoid mutating the lines array (which may be quite expensive) and just take the current slice instead:
const BATCHSIZE = 25;
const fs = require('fs').promises;
async function batchLoad() {
const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
let currentIndex = 0;
while (currentIndex < lines.length) {
const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({PutRequest: {Item: { code: l }}}));
const params = { RequestItems: { TABLE_NAME: items}};
await new Promise((resolve, reject) => {
dynamoDb.batchWrite(params, (err) => {
if (err) return reject(err);
resolve();
});
});
currentIndex += BATCHSIZE;
}
}
and
const BATCHSIZE = 25;
fs.readFile("code.txt", "utf-8", (err, data) => {
const lines = data.split("\n");
function writeBatch(currentIndex) {
if (currentIndex >= lines.length) return;
const items = lines.slice(currentIndex, currentIndex + BATCHSIZE).map(l => ({PutRequest: {Item: { code: l }}}));
const params = { RequestItems: { TABLE_NAME: items}};
dynamoDb.batchWrite(params, err => {
if (err) ...
else writeBatch(currentIndex + BATCHSIZE);
});
}
writeBatch(0);
});
To prevent running into a maximum call stack exception, you can also schedule the next batch on the event loop instead of calling it recursively, i.e.
dynamoDb.batchWrite(params, err => {
if (err) ...
else setTimeout(()=> { writeBatch(currentIndex + BATCHSIZE);}, 0);
});
This way you won't build up a massive callstack from recursive calls.
To keep track of how many records are already saved to the db you could simply store the current counter in a file. When you restart the process, load that file and check how many lines to skip. Don't forget to remove the file, once all records have been saved ... For example with the first approach:
const BATCHSIZE = 25;
const fs = require('fs').promises;
async function batchLoad() {
const lines = (await fs.readFile("code.txt", "utf-8")).split("\n");
let skipLines = 0;
try {
skipLines = +(await fs.readFile("skip.txt", "utf-8"));
if (isNaN(skipLines)) skipLines = 0;
lines.splice(0, skipLines);
} catch (e) {
skipLines = 0;
}
while (lines.length > 0) {
const items = lines.splice(0, BATCHSIZE).map(l => ({PutRequest: {Item: { code: l }}}));
const params = { RequestItems: { TABLE_NAME: items}};
await new Promise((resolve, reject) => {
dynamoDb.batchWrite(params, (err) => {
if (err) return reject(err);
resolve();
});
});
skipLines += BATCHSIZE;
await fs.writeFile("skip.txt", `${skipLines}`);
}
try {
await fs.unlink("skip.txt");
} catch (e) {
}
}
I am trying to retrieve an attendance list along with user details.
I am using caminte.js (http://www.camintejs.com/), a cross-db ORM, for database interaction.
Here is a code sample of my model function "attendanceList".
exports.attendanceList = function (req, callback) {
var query = req.query;
var searchfilters = {};
if(!req.user){
callback({ code:400, status:'error', message: 'Invalid Request', data:{}});
}else{
searchfilters["vendor_id"] = parseInt(req.user._id);
}
if(query.location && parseString(query.location) != '') {
searchfilters["location"] = parseString(query.location);
}
if (query.device_details && parseString(query.device_details) != '') {
searchfilters["device_details"] = parseString(query.device_details);
}
if(query.created_on) {
searchfilters["created_on"] = query.created_on;
}
if(query.status) {
searchfilters["status"] = { regex: new RegExp(query.status.toLowerCase(), "i") };
}
var SkipRecord = 0;
var PageSize = 10;
var LimitRecord = PageSize;
var PageIndex = 1;
if(query.pagesize) {
PageSize = parseInt(query.pagesize);
}
if(query.pageindex) {
PageIndex = parseInt(query.pageindex);
}
if (PageIndex > 1) {
SkipRecord = (PageIndex - 1) * PageSize;
}
LimitRecord = PageSize;
var SortRecord = "created_on";
if(query.sortby && query.sorttype) {
var sortingBy = query.sortby;
var sortingType = 'ASC';
if(typeof query.sorttype !== 'undefined') {
sortingType = query.sorttype;
}
SortRecord = sortingBy + ' ' + sortingType;
}
Attendance.find({ where: searchfilters, order: SortRecord, limit: LimitRecord, skip: SkipRecord }, async function (err, result) {
if(err){
callback({ code:400, status:'error', message:'Unable to connect server', errors:err });
} else {
await result.map(function(row, i){
User.findById(parseInt(row.user_id), function(err, data){
if(err){
console.log(err);
} else {
result[i]['userDetails'] = data;
}
});
});
await Attendance.count({ where: searchfilters }, function (err, count) {
callback({ code:200, status:'success', message:'OK', total:count, data:result });
});
}
});
};
I am getting only the attendance list, without user details. How do I push the user details into the attendance list? Any help would be appreciated.
Thank you.
This behavior is asynchronous. When you make a request to the DB, your code keeps running while the task that fetches the data goes into the task queue.
To keep things simple, you need to use promises while handling asynchronous jobs.
Rewrite your code from this:
Attendance.find({ where: searchfilters, order: SortRecord, limit: LimitRecord, skip: SkipRecord }, async function (err, result) {
if(err){
callback({ code:400, status:'error', message:'Unable to connect server', errors:err });
} else {
await result.map(function(row, i){
User.findById(parseInt(row.user_id), function(err, data){
if(err){
console.log(err);
} else {
result[i]['userDetails'] = data;
}
});
});
await Attendance.count({ where: searchfilters }, function (err, count) {
callback({ code:200, status:'success', message:'OK', total:count, data:result });
});
}
});
To this:
const findAttendanceFirst = (searchFilters, SortRecord, LimitRecord, SkipRecord) => {
return new Promise((resolve, reject) => {
Attendance.find({ where: searchFilters, order: SortRecord, limit: LimitRecord, skip: SkipRecord }, (err, result) => {
if(err) return reject(err);
resolve(result);
});
});
}
const findUserByIdForUserDetails = (userId) => {
return new Promise((resolve, reject) => {
User.findById(parseInt(userId), function(err, data){
if(err) return reject(err);
resolve(data);
})
});
}
const getAttendanceCount = (searchFilters) => {
return new Promise((resolve, reject) => {
Attendance.count({ where: searchFilters }, (err, count) => {
if(err) return reject(err);
resolve(count);
});
})
}
Now we can use these separate functions to make the async behavior read like synchronous code (inside an async function):
try {
const data = await findAttendanceFirst(searchFilters, SortRecord, LimitRecord, SkipRecord);
for(let userData of data){
try {
userData.userDetails = await findUserByIdForUserDetails(userData.user_id);
} catch(e) {
// Some error happened, so no user details.
// you can set here null or nothing to userDetails.
}
}
let count;
try {
count = await getAttendanceCount(searchFilters);
} catch(e){
// Same as before.
}
const callBackData = { code:200, status:'success', message:'OK', total:count, data: data };
// And here you can do whatever you want with callback data. Send to client etc.
} catch(e) {
}
NB: I've not tested this code; it will be easier for you to play with your actual data and use Promises and async/await.
Just remember that each request to the DB is asynchronous, and you need to make your code wait for that data.
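If the user lookups are independent of each other, they could also run concurrently with Promise.all instead of one at a time - a sketch reusing the same findUserByIdForUserDetails helper, swallowing individual lookup failures just as the sequential loop above does:
const detailsList = await Promise.all(
    data.map(row => findUserByIdForUserDetails(row.user_id).catch(() => null))
);
data.forEach((row, i) => { row.userDetails = detailsList[i]; });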
The request body sometimes (less than 1% of the time) is null when pulled into lambda. I am processing on the order of 14,000 request bodies one at a time. Any request bodies erring out have to be handled manually. Why is the body randomly coming in null?
Sample code, run from the prompt (node index):
const async = require('async');
const _ = require('lodash');
const moment = require('moment');
const Client = require('node-rest-client').Client;
const fs = require('fs');
const input = require('./TestFull.json');
module.exports = () => {
const filename = `./eventfulFails-${new moment().format("YYYY-MM-DD-HHmmss")}.json`;
console.log('Start Time: ', new moment().format("HH:mm:ss"));
let failedObjects = {
events: [],
venues: [],
performers: []
};
async.parallel([
async.apply(processVenues, input.venues, failedObjects),
async.apply(processPerformers, input.performers, failedObjects)
], (lookupErr) => {
if (lookupErr) {
return console.error('Error processing venues and performers.', lookupErr);
}
console.log('Start Events: ', new moment().format("HH:mm:ss"));
async.waterfall([
async.apply(processEvents, input.events, failedObjects)
], (eventErr) => {
if (eventErr) {
console.log('Time of Failure: ', new moment().format("HH:mm:ss"));
return console.error('Error processing events.', eventErr);
}
console.log('End Time: ', new moment().format("HH:mm:ss"));
if (failedObjects.events.length || failedObjects.venues.length || failedObjects.performers.length) {
const stream = fs.createWriteStream(filename);
stream.once('open', function(fd) {
stream.write(JSON.stringify(failedObjects));
stream.end();
});
}
});
});
};
function processVenues(venues, failedObjects, callback) {
const calls = [];
for (let i = 0; i < venues.length; i++) {
const v = venues[i];
calls.push(async.apply((venue, postCallback) => {
const client = new Client();
const args = {
data: venue,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful-venue', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.venues.push({
venue,
response
});
console.log('venue status code: ', response);
console.log('venue data: ', venue);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
function processPerformers(performers, failedObjects, callback) {
const calls = [];
for (let i = 0; i < performers.length; i++) {
const v = performers[i];
calls.push(async.apply((performer, postCallback) => {
const client = new Client();
const args = {
data: performer,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful-performer', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.performers.push({
performer,
response
});
console.log('performer status code: ', response);
console.log('performer data: ', performer);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
function processEvents(events, failedObjects, callback) {
const calls = [];
for (let i = 0; i < events.length; i++) {
const v = events[i];
calls.push(async.apply((event, postCallback) => {
const client = new Client();
const args = {
data: event,
headers: {"Content-Type": "application/json"}
};
client.post('https://hm1br4yo34.execute-api.us-west-2.amazonaws.com/dev/eventful', args, (data, response) => {
if (response.statusCode !== 200 && response.statusCode !== 201) {
failedObjects.events.push({
event,
response
});
console.log('event status code: ', response);
console.log('event data: ', event);
}
return postCallback(null);
});
}, v));
}
async.waterfall(calls, callback);
}
if (!module.parent) {
module.exports();
}
Code of the Lambda function (eventful-venue-load) that processVenues calls:
const _ = require('lodash');
const AWS = require('aws-sdk');
const async = require('async');
const sdk = require('@consultwithmikellc/withify-sdk');
const host = process.env.aurora_host;
const user = process.env.aurora_user;
const database = process.env.aurora_database;
let decryptedPassword;
const lambda = new AWS.Lambda({
region: 'us-west-2' //your region
});
class WithifyEventCreate extends sdk.Lambda {
constructor(event, context, keysToDecrypt) {
super(event, context, keysToDecrypt);
this.getLocation = this.getLocation.bind(this);
this.insertLocations = this.insertLocations.bind(this);
this.insertLocationImages = this.insertLocationImages.bind(this);
}
decryptedKey(key, value) {
switch (key) {
case 'aurora_password':
decryptedPassword = value;
break;
}
}
initializeComplete() {
this.connect(host, user, decryptedPassword, database, true);
}
connectComplete() {
async.waterfall(
[
this.getLocation,
this.insertLocations,
this.insertLocationImages
]
);
}
getLocation(callback) {
const {id: eventfulLocationID} = this.body;
this.connection.query('SELECT * FROM `Location` WHERE `eventfulLocationID` = ?',
[eventfulLocationID],
(err, results) => {
if (err) {
// error call block
return this.sendResponse(err, this.createResponse(500));
} else if (results.length === 1) {
console.log('Invoking withify-eventful-venue-update...');
lambda.invoke({
FunctionName: 'withify-eventful-venue-update',
Payload: JSON.stringify(this.event)
}, (error, data) => {
return this.sendResponse(null, JSON.parse(data.Payload));
});
} else if (results.length > 1) {
return this.sendResponse(`The location lookup produced multiple results. event:${JSON.stringify(this.body)}`, this.createResponse(500));
} else {
return callback(null);
}
}
);
}
insertLocations(callback) {
const {name: locationName, address: street, city, region_abbr: state, postal_code,
description, id: eventfulLocationID, latitude: lat, longitude: lng, withdrawn: locationWithdrawn} = this.body;
let addresses = street.concat(', ', city, ', ', state, ', ', postal_code);
if (!description.length){
var phones = "";
}else{
var re = /(([\(][0-9]{3}[\)][\s][0-9]{3}[-][0-9]{4})|([0-9]{3}[-][0-9]{3}[-][0-9]{4})|([0-9]{3}[\.][0-9]{3}[\.][0-9]{4}))/i;
this.body.found = description.match(re);
if (!this.body.found){
var phone = "";
}else{
if (!this.body.found.length){
var phone = "";
}else{
var phone = this.body.found[0];
}
}
}
this.connection.query('INSERT IGNORE INTO `Location` (`locationName`, `address`, ' +
'`phone`, `lat`, `lng`, `eventfulLocationID`, `locationWithdrawn`) VALUES (?, ?, ?, ?, ?, ?, ?)',
[locationName, addresses, phone, lat, lng, eventfulLocationID, locationWithdrawn],
(err, results) => {
if (err) {
return this.sendResponse(err, this.createResponse(500));
}
this.body.locationID = results.insertId;
return callback(null);
}
);
}
insertLocationImages(callback) {
var altText = "";
const images = _.flatten(this.body.images.map(im => {
return _.map(im.sizes, (ims, idx) => {
const title = `Image ${idx}`;
return [
this.body.locationID,
this.body.name,
ims.url,
null,
null,
this.body.id,
ims.width,
ims.height
];
});
}));
if(!images[0]){
return this.sendResponse(null, this.createResponse(201, this.body));
}
this.connection.query('INSERT IGNORE INTO `LocationImage` (`locationID`, `imageTitle`, `imageUrl`, ' +
'`imageName`, `altText`, `eventfulLocationID`, `width`, `height`) VALUES ?',
[images],
(err, results) => {
if (err) {
return this.sendResponse(err, this.createResponse(500));
} else if (results.affectedRows !== images.length) {
return this.sendResponse('The image inserts did not affect the right number' +
' of rows.', this.createResponse(500));
}
return this.sendResponse(null, this.createResponse(201, this.body));
}
);
}
}
exports.handler = (event, context) => {
const withifyEventCreate = new WithifyEventCreate(event, context, ['aurora_password']);
withifyEventCreate.initialize([decryptedPassword]);
};
I am creating an application where a user can have many rooms and each room can have many channels. Here is my code for retrieving the rooms and their corresponding channels:
getRooms: function (req, res) {
User.find({id: req.cookies.claver_id}).exec(function (err, result) {
if (err) {
return res.send(400);
}
rooms = result[0].rooms;
if (rooms.length === 1) {//No room defaults to ['']
return res.send(400);
}
var roomsObj = {};
var roomsArr = [];//we will place the roomsObj inside the roomsArr
var chansObj = {};
var chansArr = [];
async.each(rooms, function (roomId, cb){
roomsObj = {};
if (roomId !== '') {
Rooms.findOne({id: roomId}).exec(function (err, room){
roomName = room.name;
inviteLink = room.inviteLink;
roomsObj.name = roomName;
roomsObj.id = roomId;
roomsObj.inviteLink = inviteLink;
var channels = room.channels;
async.each(channels, function (channelId, cb) {
chansObj = {};
Channels.findOne({id: channelId}).exec(function (err, channel){
chansObj.name = channel.channelName;
chansObj.id = channelId;
chansObj.type = channel.channelType;
chansArr.push(chansObj);
cb();
});
},
function (err) {
});
});
}
cb();
}, function (err) {
roomsObj.channels = chansArr;
roomsArr.push(roomsObj);
sails.log(roomsArr);
});
});
}
It is supposed to return a JavaScript object with the following structure:
[ { name: "Room Name",
roomId: "Room Id",
inviteLink: "Room Invite Link",
channels: [
{
name: "Channel Name",
id: "channel Id"
}
]
}
]
But I always get an empty array because async.each(rooms, function (roomId, cb){ }) does not wait for async.each(channels, function (channelId, cb) {}) to complete, so I end up with an empty rooms object. How do I solve this issue?
You should call your rooms loop's callback only after completing your channels loop.
You should do something like this:
getRooms: function (req, res) {
User.find({id: req.cookies.claver_id}).exec(function (err, result) {
if (err) {
return res.send(400);
}
rooms = result[0].rooms;
if (rooms.length === 1) {//No room defaults to ['']
return res.send(400);
}
var roomsObj = {};
var roomsArr = [];//we will place the roomsObj inside the roomsArr
var chansObj = {};
var chansArr = [];
async.each(rooms, function (roomId, callback1){
roomsObj = {};
if (roomId !== '') {
Rooms.findOne({id: roomId}).exec(function (err, room){
roomName = room.name;
inviteLink = room.inviteLink;
roomsObj.name = roomName;
roomsObj.id = roomId;
roomsObj.inviteLink = inviteLink;
var channels = room.channels;
var i=0;
async.each(channels, function (channelId, callback2) {
chansObj = {};
Channels.findOne({id: channelId}).exec(function (err, channel){
chansObj.name = channel.channelName;
chansObj.id = channelId;
chansObj.type = channel.channelType;
chansArr.push(chansObj);
i++;
if(i===(channels.length-1)){
i=0;
callback1();
}else{
callback2();
}
});
},
function (err) {
});
});
}
}, function (err) {
roomsObj.channels = chansArr;
roomsArr.push(roomsObj);
sails.log(roomsArr);
});
});
}
I solved it; it really was a case for promises. I used Bluebird promises combined with async - here is the modified code:
getRooms: function (req, res) {
User.find({id: req.cookies.claver_id}).exec(function (err, result) {
if (err) {
return res.send(400);
}
rooms = result[0].rooms;
if (rooms.length === 1) {//No room defaults to ['']
return res.send(400);
}
var roomsObj = {};
var roomsArr = [];//we will place the roomsObj inside the roomsArr
var chansObj = {};
var chansArr = [];
Promise.each(rooms, function (roomId, callback1){
roomsObj = {};
if (roomId !== '') {
async.series ([
function () {
Rooms.findOne({id: roomId}).then(function (room){
roomName = room.name;
inviteLink = room.inviteLink;
roomsObj.name = roomName;
roomsObj.id = roomId;
roomsObj.inviteLink = inviteLink;
channels = room.channels;
sails.log(roomName);
})
}
]);
return Promise.each(channels, function (channelId) {
return Promise.all([
Channels.findOne({id: channelId}).then(function (channel){
chansObj = {};
chansObj.name = channel.channelName;
chansObj.id = channelId;
chansObj.type = channel.channelType;
chansArr.push(chansObj);
sails.log(chansObj);
})
]).then(function () {
sails.log('done one');
});
}).then(function () {
roomsObj.channels = chansArr;
roomsArr.push(roomsObj);
sails.log('done all');
chansArr = [];
});
}
}).then(function () {
sails.log(roomsArr);
sails.log("grand finish");
});
});
}
Thanks to everyone who contributed.