I am new to Node.js. I don't just want to solve this problem; I also want to understand the concept behind it.
1 Prize has many Winners. Both are separate tables. I first get a list of prizes related to a certain id. I loop through those prizes using Promise.all() and then, for each prize, I query for winners.
Here is my code:
router.post("/getResult", function (req, res) {
const lottery_id = req.body.lottery_id;
const data = [];
//Find list of prizes for given lottery_id. Note the sorting applied here
Prize.find({"lotid": lottery_id}).sort({name: 1})
.then(async function (prizes) {
try {
await Promise.all(prizes.map(async (prize) => {
//Sorting here works fine as confirmed by this log.
console.log(prize.name);
await Winner.find({"id": prize._id})
.then(function (winners) {
data.push({
prize: prize,
winners: winners
});
})
}));
//Sorting here is completely messed up.
// Something wrong with the second query Winner.find() and pushing data
res.send({success: 1, data: data});
} catch (err) {
console.log("Error " + err);
res.send({success: 0, data: err});
}
}).catch(function (err) {
res.send({success: 0, error: err});
})
});
The final result that I get doesn't follow the sorting applied to prize. Maybe the query Winner.find() for the 2nd prize finishes before the one for the 1st prize, and hence it is pushed into data before the 1st prize.
You are seeing an unexpected sort order because there is no coordination of the order in which values are pushed into the data array. prizes.map() does iterate over the prizes array sequentially; however, there is no guarantee that each mapped promise, or more specifically each Winner.find(), will be fulfilled in the same chronological order.
One way you could fix this is to use the return value of Promise.all(). Here is an example:
router.post("/getResult", async function (req, res) {
const lottery_id = req.body.lottery_id;
try {
const prizes = await Prize.find({ "lotid": lottery_id }).sort({ name: 1 });
const prizeWinners = await Promise.all(prizes.map(async function(prize) {
const winners = await Winner.find({ "id": prize._id });
return {
prize,
winners
};
}));
res.send({ success: 1, data: prizeWinners });
} catch (err) {
res.send({ success: 0, error: err });
}
});
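Note that Promise.all() resolves to an array whose elements are in the same order as the promises in the input array, regardless of the order in which they actually settle, so prizeWinners preserves the sort order of prizes.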
I have a deleteMany request, but I am having a hard time handling its result in my context: only one deleted value comes through Pusher.js instead of all of them.
Here is my change stream code and Pusher code on the server side:
if (schedules.operationType === 'delete') {
const scheduleDetails = schedules.documentKey;
pusher.trigger('schedules', 'deleted', {
_id: scheduleDetails._id,
teamOne: scheduleDetails.teamOne,
teamTwo: scheduleDetails.teamTwo,
user: scheduleDetails.user,
isDone: scheduleDetails.isDone,
isStarted: scheduleDetails.isStarted,
date: scheduleDetails.date,
gameEvent: scheduleDetails.gameEvent,
});
}
Here is my Pusher code on the client side. I am using React, by the way; it is stored in my Context API:
ScheduleChannel.bind('deleted', ({ deletedSchedule }) => {
console.log(deletedSchedule);
setScheduleList(
scheduleList.filter((schedule) => schedule._id !== deletedSchedule._id)
);
});
Here is my code for the delete request:
exports.deleteallmatch = async (req, res) => {
try {
const { sub } = req.user;
const deletedMatches = await Schedule.deleteMany({ user: sub });
return res.status(201).json({
message: 'All of your schedule is successfully deleted!',
deletedMatches,
});
} catch (err) {
return res.status(400).json({
message: 'Something went wrong.',
});
}
};
The delete request works fine, but I want real-time updates in my app, and only one deleted document is being sent instead of many. How can I solve this?
The deleteMany() method returns an object that contains three fields:
n – number of matched documents
ok – 1 if the operation was successful
deletedCount – number of deleted documents
What you can do is:
First find all elements that match your query
Store them in some variable
Perform the deletion
Return the stored variable
let deleted_items = await Schedule.find({ user: sub });
await Schedule.deleteMany({ user: sub });
return res.status(201).json({
message: 'All of your schedule is successfully deleted!',
deleted_items,
});
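If you also want clients to see all of the deletions in real time, one option is to trigger a single Pusher event that carries the stored documents, rather than relying on the change stream, whose delete events only expose the _id in documentKey. Here is a sketch that reuses the pusher instance and channel from the question; the 'deleted-many' event name is hypothetical:
const deleted_items = await Schedule.find({ user: sub });
await Schedule.deleteMany({ user: sub });

// One event carrying every deleted _id, instead of one change-stream event per document
// (the ObjectIds serialize to strings in the event payload)
pusher.trigger('schedules', 'deleted-many', {
  deletedIds: deleted_items.map((schedule) => schedule._id),
});
And on the client:
ScheduleChannel.bind('deleted-many', ({ deletedIds }) => {
  setScheduleList(
    scheduleList.filter((schedule) => !deletedIds.includes(schedule._id))
  );
});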
I need to query my database for users based on an array of emails and then execute a function for each result; I do this with eachAsync:
mongoose.model('User')
.find({email: {$in: ['foo@bar.com', 'bar@foo.com']}})
/* -- Run side effects before continuing -- */
.cursor()
.eachAsync((doc) => {
// do stuff
});
The problem I'm having is that I need to return a 404 status if any of the users with the given emails do not exist.
I've been looking through the mongoose docs but I can't seem to find a way of running "side effects" when working with queries. Simply "resolving" the DocumentQuery with .then doesn't work since you can't turn it into a cursor afterwards.
How can I achieve this?
You could try implementing it as shown below. I hope it helps.
// Function using async/await
getCursor: async (_, res) => {
  try {
    const result = []; // To hold documents read from the cursor
    const searchArray = ['foo@bar.com', 'bar@foo.com'];
    const foundEmails = new Set(); // To track which emails actually matched a user
    const cursor = mongoose.model('User').find({ email: { $in: searchArray } }).cursor();
    // NOTE: Use cursor.on('data') to read the stream of documents
    cursor.on('data', (cursorChunk) => {
      // NOTE: Run your side effect here before continuing
      foundEmails.add(cursorChunk.email);
      result.push(cursorChunk);
    });
    // NOTE: listen to cursor.on('end') and respond only once, after all documents were read
    cursor.on('end', () => {
      if (searchArray.some((email) => !foundEmails.has(email))) {
        // At least one of the given emails has no matching user
        return res.status(404).json({ message: 'Resource not found!' });
      }
      res.status(200).json({ result, success: true });
    });
  } catch (error) {
    // Do error log and/or return to client
    res.status(404).json({ error, message: 'Resource not found!' });
  }
}
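Alternatively, you could keep the eachAsync() flow from the question: it returns a promise that resolves once the cursor is exhausted, so you can run the existence check after awaiting it. A sketch along those lines, inside an async route handler:
const searchArray = ['foo@bar.com', 'bar@foo.com'];
const foundEmails = new Set();

await mongoose.model('User')
  .find({ email: { $in: searchArray } })
  .cursor()
  .eachAsync(async (doc) => {
    foundEmails.add(doc.email);
    // do stuff with each user here
  });

// 404 if any of the given emails had no matching user
if (searchArray.some((email) => !foundEmails.has(email))) {
  return res.status(404).json({ message: 'Resource not found!' });
}
res.status(200).json({ success: true });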
I am new to RESTful APIs and I've successfully implemented the GET and DELETE commands for my API (GET localhost:4000/api and DELETE localhost:4000/api work fine in Postman).
Code for my get looks like:
router.get('/', function(req, res) {
user.find({}, function(err, users) {
if(err){
res.status(404).send({
message: err,
data: []
});
} else {
res.status(200).send({
message: 'OK',
data: users
});
}
});
});
Now I want to implement query parameters. For example, I want to support sorting, where
http://localhost:4000/api/users?sort={"name": 1}
(1 = ascending; -1 = descending)
would mean sorting by name in ascending order.
What I am not sure how to do is:
How do I make the ?sort parameter work?
How do I select which field to sort?
Please help!
You can pass just the order (asc or desc); if you always sort by name, you can do it like this:
http://localhost:4000/api/users?order=-1
or
http://localhost:4000/api/users?order=1
then in your controller
router.get('/', function(req, res) {
let order = Number(req.query.order) === -1 ? -1 : 1; // query params arrive as strings, so cast before sorting
user
.find({})
.sort({"name": order})
.exec(function(err, users) {
if(err){
res.status(404).send({
message: err,
data: []
});
} else {
res.status(200).send({
message: 'OK',
data: users
});
}
});
});
This works if you use Mongoose with MongoDB.
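If you do want to support the JSON form from the question (?sort={"name": 1}), you could parse the query parameter yourself. A sketch, with a guard for malformed JSON; in practice you would also want to whitelist the sortable fields rather than passing client input straight to sort():
router.get('/', function(req, res) {
  let sort = {};
  try {
    // e.g. ?sort={"name":1} arrives URL-encoded; req.query.sort is the decoded string
    sort = req.query.sort ? JSON.parse(req.query.sort) : {};
  } catch (e) {
    return res.status(400).send({ message: 'Invalid sort parameter', data: [] });
  }
  user.find({}).sort(sort).exec(function(err, users) {
    if (err) {
      res.status(404).send({ message: err, data: [] });
    } else {
      res.status(200).send({ message: 'OK', data: users });
    }
  });
});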
One cool solution that I frequently use is the following format:
/api/users?sort=-name|+firstname
I use | to separate multiple sort fields, - for desc, and + for asc (note that a literal + in a query string decodes to a space, so send it URL-encoded as %2B).
In express:
const { sort } = req.query; // sort = '-name|+firstname'
const order = sort.split('|') // will return an array ['-name', '+firstname']
  .reduce((order, item) => {
    const direction = item.charAt(0) === '-' ? -1 : 1;
    // strip the leading '-'/'+' (a '+' may arrive as a space after URL decoding)
    const field = /^[-+ ]/.test(item) ? item.substr(1) : item;
    order[field] = direction;
    return order;
  }, {});
// order = { name: -1, firstname: 1 }
users.find({}).sort(order); // do your logic
I would like to know if it's possible to run a series of SQL statements and have them all committed in a single transaction.
The scenario I am looking at is where an array has a series of values that I wish to insert into a table, not individually but as a unit.
I was looking at the following item, which provides a framework for transactions in node using pg. The individual queries appear to be nested within one another, so I am unsure how this would work with an array containing a variable number of elements.
https://github.com/brianc/node-postgres/wiki/Transactions
var pg = require('pg');
var rollback = function(client, done) {
client.query('ROLLBACK', function(err) {
//if there was a problem rolling back the query
//something is seriously messed up. Return the error
//to the done function to close & remove this client from
//the pool. If you leave a client in the pool with an unaborted
//transaction weird, hard to diagnose problems might happen.
return done(err);
});
};
pg.connect(function(err, client, done) {
if(err) throw err;
client.query('BEGIN', function(err) {
if(err) return rollback(client, done);
//as long as we do not call the `done` callback we can do
//whatever we want...the client is ours until we call `done`
//on the flip side, if you do call `done` before either COMMIT or ROLLBACK
//what you are doing is returning a client back to the pool while it
//is in the middle of a transaction.
//Returning a client while its in the middle of a transaction
//will lead to weird & hard to diagnose errors.
process.nextTick(function() {
var text = 'UPDATE account SET money = money + $1 WHERE id = $2';
client.query(text, [100, 1], function(err) {
if(err) return rollback(client, done);
client.query(text, [-100, 2], function(err) {
if(err) return rollback(client, done);
client.query('COMMIT', done);
});
});
});
});
});
My array logic is:
banking.forEach(function(batch) {
  client.query(text, [batch.amount, batch.id], function(err, result) {
    // ...
  });
});
pg-promise offers very flexible support for transactions. See Transactions.
It also supports partial nested transactions, aka savepoints.
The library manages transactions automatically, which is what should be used these days, because too many things can go wrong if you try to organize a transaction manually, as in your example.
See a related question: Optional INSERT statement in a transaction
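For illustration, here is a minimal sketch of how the variable-length array from the question could run as a single transaction with pg-promise, assuming db is an instance created via pgp(connection) and using the UPDATE text from the wiki example above:
const pgp = require('pg-promise')();
const db = pgp(/* your connection string or config */);

function applyBatches(banking) {
  // Everything inside tx() runs in one transaction:
  // COMMIT if the returned promise resolves, ROLLBACK if it rejects
  return db.tx((t) => {
    const queries = banking.map((batch) =>
      t.none('UPDATE account SET money = money + $1 WHERE id = $2', [batch.amount, batch.id])
    );
    return t.batch(queries); // settles once every query in the array has finished
  });
}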
Here's a simple TypeScript solution if you want to avoid pg-promise:
import { PoolClient } from "pg"
import { pool } from "../database"
const tx = async (callback: (client: PoolClient) => Promise<void>) => {
const client = await pool.connect();
try {
await client.query('BEGIN')
try {
await callback(client)
await client.query('COMMIT')
} catch (e) {
await client.query('ROLLBACK')
throw e // rethrow so the caller knows the transaction failed
}
} finally {
client.release()
}
}
export { tx }
Usage:
...
let result;
await tx(async client => {
const { rows } = await client.query<{ cnt: string }>('SELECT COUNT(*) AS cnt FROM users WHERE username = $1', [username]);
result = parseInt(rows[0].cnt) > 0;
});
return result;
Here is my code:
server.get(url_prefix + '/user/:user_id/photos', function(req, res, next) {
if (!req.headers['x-session-id']) {
res.send({
status: {
error: 1,
message: "Session ID not present in request header"
}
})
} else {
User.findOne({
session_id: req.headers['x-session-id']
}, function(err, user) {
if (user) {
var user_id = req.params.user_id
Album.find({userId : user_id})
.populate('images')
.exec(function (err, albums) {
if (albums) {
albums.forEach(function(album, j) {
var album_images = album.images
album_images.forEach(function(image, i) {
Like.findOne({imageID : image._id, userIDs:user._id}, function(err,like){
if(like){
albums[j].images[i].userLike = true;
}
})
})
})
return res.send({
status: {
error: 0,
message: "Successful"
},
data: {
albums: albums
}
})
} else
return notify_error(res, "No Results", 1, 404)
})
}
else {
res.send({
status: {
error: 1,
message: "Invalid Session ID"
}
})
}
})
}
})
I am trying to add an extra value (albums[j].images[i].userLike = true;) to my images array, which is inside the albums array.
The problem is that return res.send({...}) sends the data before we get the responses from the forEach.
How can I make it work so that the return happens only after the forEach has completed all of its iterations?
You will have to wait to invoke res.send until you have fetched all the likes for all the images in each of the albums. E.g.:
var pendingImageLikes = album_images.length;
album_images.forEach(function(image, i) {
  Like.findOne({ imageID: image._id, userIDs: user._id }, function(err, like) {
    if (like) {
      albums[j].images[i].userLike = true;
    }
    if (!--pendingImageLikes) {
      // we fetched all likes
      res.send(
        // ...
      );
    }
  });
});
You might need to special-case album_images.length === 0, since with an empty array the callback never fires and res.send is never reached.
Also, this does not take into account that you have multiple albums with multiple images each. You would have to delay res.send there in a very similar way to make this actually work. You might want to consider using a flow control library like first (or any other of your preference, just search for "flow control library") to make this a bit easier.
Also, you might want to consider not relying on semicolon insertion and manually type your semicolons. It prevents ambiguous expressions and makes the code easier to read.
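These days you could also avoid manual counting entirely, since Mongoose queries are thenable: build one promise per image across all albums and send the response once they have all settled. A sketch:
// One promise per image; each one sets userLike as its side effect
const lookups = [];
albums.forEach(function(album) {
  album.images.forEach(function(image) {
    lookups.push(
      Like.findOne({ imageID: image._id, userIDs: user._id }).then(function(like) {
        if (like) {
          image.userLike = true; // image references the entry in albums
        }
      })
    );
  });
});

Promise.all(lookups)
  .then(function() {
    res.send({
      status: { error: 0, message: "Successful" },
      data: { albums: albums }
    });
  })
  .catch(function(err) {
    notify_error(res, "No Results", 1, 404);
  });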
Since you need your code to wait until all of the find operations have completed, I'd suggest you consider using the async package, and specifically something like each. It makes async loops cleaner, especially when dealing with MongoDB documents and queries. There are lots of nice features, including the ability to run a series of functions sequentially or as a waterfall (when you want to perform a series but pass the results from step to step).
> npm install async
Add to your module:
var async = require("async");
Your code would look something like this:
async.each(albums, function(album, albumDone) {
  async.each(album.images, function(image, imageDone) {
    Like.findOne({ imageID: image._id, userIDs: user._id }, function(err, like) {
      if (!err && like) {
        image.userLike = true; // image references the entry in albums, so the response data is updated
      }
      imageDone(err); // callback that this image has finished
    });
  }, albumDone); // called when all of this album's images have finished
}, function(err) { // called when every album has called albumDone()
  if (!err) {
    return res.send({
      status: {
        error: 0,
        message: "Successful"
      },
      data: {
        albums: albums
      }
    });
  }
  return notify_error(res, "No Results", 1, 404);
});