Node-orm saving nested objects - node.js

My model is essentially
Idea has many Position
Idea has one User
Position has one Direction
Position has one Security
Position has one Idea (inverse of Idea has many Position)
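For context, a rough sketch of how these associations might be declared with node-orm2; the definitions and property types here are assumptions inferred from the setters used below, not the poster's actual schema:
// Hypothetical node-orm2 association declarations (names inferred from
// the setters used below; property shapes are assumptions):
var Idea = db.define('idea', { openedOn: Date });
var Position = db.define('position', { weight: Number });

Idea.hasOne('user', User);                               // gives idea.setUser(...)
Position.hasOne('direction', Direction);                 // gives position.setDirection(...)
Position.hasOne('security', Security);                   // gives position.setSecurity(...)
Position.hasOne('idea', Idea, { reverse: 'positions' }); // reverse side gives idea.getPositions(...)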
I have wrapped many of the node-orm functions with 'Q' promises so I can program in that style. So find is now qFind, etc.
I am struggling to find the best way to persist this to the DB (MySql):
User.qGet(1004).then(function(user) {
    var newIdea = new Idea({
        'openedOn': new Date()
    })
    newIdea.setUser(user, console.log)
    Idea.qCreate(newIdea).then(function(idea) {
        _.each(positions, function(position) {
            Security.qFind({ticker: position.ticker}).then(function(securities) {
                var security = securities[0]
                Direction.qFind({direction: position.direction}).then(function(directions) {
                    var direction = directions[0]
                    var newPosition = Position({
                        'weight': 1
                    })
                    newPosition.setIdea(idea, console.log)
                    newPosition.setDirection(direction, console.log)
                    newPosition.setSecurity(security, console.log)
                    console.log(JSON.stringify(newPosition))
                }) // Direction.qFind
            }) // Security.qFind
        }) // _.each
        console.log(JSON.stringify(idea))
        res.send(idea)
    }) // Idea.qCreate
}) // User.find
Here are my problems:
1. It is not working. When I set the idea, I get the error:
[Error: ER_BAD_NULL_ERROR: Column 'directionId' cannot be null]
The issue is that I need to set three foreign keys on this object...
2. Is this the right approach to saving nested objects?

One solution is setting the ids directly:
var newPosition = Position({
    'weight': 1
    , 'ideaId': idea.id
    , 'directionId': direction.id
    , 'securityId': security.id
})
Position.create(newPosition, function(e, i) {
})
The problem is that /Associations/One.js calls save when setting an association.

I have decided to embrace promises and the Q library.
Step 1: add Q.nbind wrappers to the interesting methods on my ORM models. For instance:
model = db.models.direction
model['qFind'] = Q.nbind(model['find'], model);
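The same wrapping can be done in a loop; a small sketch, with the method list assumed:
// Wrap several nodeback-style model methods at once (method names assumed)
['find', 'get', 'create'].forEach(function(name) {
    var qName = 'q' + name.charAt(0).toUpperCase() + name.slice(1); // find -> qFind
    model[qName] = Q.nbind(model[name], model); // bind so `this` stays the model
});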
Step 2: add instance methods to my models that set an association by a natural key. For instance:
setDirectionByName: function(direction) {
    var _this = this;
    var internalSuccess = function(directions) {
        _this.direction = directions[0]
        _this.directionId = directions[0].id
        return directions
    }
    return {
        then: function(externalSuccess, externalFail) {
            var success = _.compose(externalSuccess, internalSuccess)
            Direction.qFind({direction: direction}).then(success, externalFail)
        }
    }
}
In this step I define an internal success function which stores the properties, and return a thenable whose `then` uses composition to form the overall success function (the one from my model wrapped inside the one passed to the `then` invocation).
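A side note: the object returned above is a bare thenable, not a real promise, so it won't chain or propagate errors on its own. A sketch of the same method that simply returns the promise from qFind, which composes more safely with Q.all():
setDirectionByName: function(direction) {
    var _this = this;
    return Direction.qFind({direction: direction}).then(function(directions) {
        // store the association, then pass the result through for further chaining
        _this.direction = directions[0];
        _this.directionId = directions[0].id;
        return directions;
    });
}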
Step 3: handling the request and saving the data
User.qGet(1004).then(function(user) {
    var newIdea = new Idea({
        'openedOn': new Date()
    })
    newIdea.setUser(user, function(){})
    Idea.qCreate(newIdea).then(function(idea) {
        var positionPromises = _.map(positions, function(position) {
            var newPosition = Position({
                'weight': 1
                , 'ideaId': idea.id
            })
Here I set the foreign keys and wait for them to complete before Position.create is kicked off asynchronously, with a promise being returned:
            return Q.all([
                newPosition.setDirectionByName(position.direction)
                , newPosition.setSecurityByTicker(position.ticker)
            ]).then(function(noop) {
                //newPosition.setIdea(idea)
                return Position.qCreate(newPosition)
            })
        }) // _.map
So then I have an array of promises for Position objects. Here I am going to wait for them all to be fulfilled prior to returning the results to the client:
        Q.all(positionPromises).then(function(positions) {
            console.log('RESULTS of POSITION Promises')
            //console.log(JSON.stringify(positions))
            //This doesn't seem to work as expected
            //idea.setPositions(positions, function(e, o){ })
            // kludge it
            idea.positions = positions
            console.log(JSON.stringify(idea))
            console.log('/RESULTS of POSITION Promises')
            res.send(idea)
        })
        console.log('FALL THROUGH')
    }) // Idea.qCreate
}) // User.find


In Node/Express, how to match or compare values in nested promises using csv-to-array

I have a Node/Express partial that is being called with AJAX and is supposed to send a status update back to the view after 2 subsequent API calls are made. This workflow relies on the csv-to-array module to read a ship-orders.csv file and determine whether the second API call (POST to Shipments) has already occurred. It is supposed to do this by matching the OrderNumber in the csv file to the returned OrderNumber from the FindOrders endpoint (the first API).
The problem is that I am creating 2 arrays of order numbers to compare, but matching the first set of order numbers to the second set either always returns true or always returns false, when it very clearly should be "true" for the first record in the csv and "false" for every other.
Before getting into the bulk of the code, here's the promise that reads the csv file into an array:
csv-to-array:
var csvShipPromise = new Promise(function(resolve, reject){
    var csvColumns = ['ChannelName', 'OrderNumber', 'LineNumber', 'WarehouseCode', 'Qty', 'Carrier', 'TrackingNumber', 'Shipdate', 'ShipMethod'];
    var csvShipArr;
    csvArray({
        file: shipLog,
        columns: csvColumns
    }, function(err, array){
        if (err) return reject(err); // reject so an error doesn't leave the promise pending forever
        csvShipArr = array;
        resolve(csvShipArr);
    });
});
Next I have a long promise chain that gets executed when the request to the partial is made. The comparison between logged OrderNumbers and OrderNumbers that need to be posted to Shipments is in the 5th "then" block (and it's commented in the code below).
router.get and chained promise:
router.get('/', function(req, res, next) {
    findPromise.then(function(findData){
        //Properly format xml string
        var foundData = replaceAll(findData, '&lt;', '<');
        foundData = replaceAll(foundData, '&gt;', '>');
        return foundData;
    }).then(function(foundData){
        //Parse xml to JSON and stringify
        var parsedFound;
        parseString(foundData, function(err, result){ //uses an xml to json module
            parsedFound = JSON.stringify(result);
        });
        return(parsedFound);
    }).then(function(parsedStr){
        //Parse JSON and return an array of objects
        var parsedJson = JSON.parse(parsedStr);
        var orders = parsedJson['soap:Envelope']['soap:Body'][0]['FindOrders'][0]['orders'][0]['order'];
        return orders;
    }).then(function(orders){
        //Get only orders with a tracking number.
        var trackArray = [];
        var ord;
        for(ord in orders){
            var postObj = orders[ord];
            if(postObj.TrackingNumber[0].length > 1){
                trackArray.push(postObj);
            }
        }
        return trackArray; //array of orders that contain tracking numbers
    }).then(function(trackArray){
        /**** This is the block that is causing problems. *****/
        var tItm;
        var loggedOrders = [];
        for(tItm in trackArray){
            var alreadyLogged = false;
            var trackedItm = trackArray[tItm];
            var trackedOrderNum = trackedItm.ReferenceNum;
            csvShipPromise.then(function(csvOrders){
                var csv;
                var loggedOrderArr = [];
                for (csv in csvOrders){
                    var csvItm = csvOrders[csv];
                    var csvOrderNum = csvItm.OrderNumber; //gets the OrderNumber as expected
                    loggedOrderArr.push(csvOrderNum);
                }
                return loggedOrderArr; //Return a simple array of all OrderNumbers
            }).then(function(loggedOrderArr){
                console.log(loggedOrderArr);
                console.log(trackedOrderNum);
                var ord;
                for (ord in loggedOrderArr){
                    if(trackedOrderNum == loggedOrderArr[ord]){
                        console.log('found');
                        alreadyLogged = true;
                    }
                    else {
                        console.log('not found');
                        alreadyLogged = false;
                    }
                }
                return loggedOrderArr; //Simply returning this value because the alreadyLogged test isn't working.
            });
            /* Here is where the test fails.
               It shouldn't, because there are, say, 4 OrderNumbers in the result of the first API call,
               and only 1 Order number logged in the CSV.
               So it should be true once, and false 3 times.
               But it is true all the time. */
            if(alreadyLogged){
                console.log('found'); //Always logs true/found.
            } else {
                console.log('not found');
            }
        }
        return trackArray; //Just passing the array to the view, for now.
    }).then(function(obj){
        res.send(obj);
        return(obj);
    }).catch(function(err){
        console.log(err);
    });
});
When I console.log the values of trackArray and loggedOrderArr, I see that there should be an intersection between an array of 4 values and an array of 1 value, but for some reason the comparison, if(trackedOrderNum == loggedOrderArr[ord]), isn't working.
Alright, I'm gonna be honest, your code made my eyes swim. But as far as I can tell, a few things pop up:
move var alreadyLogged = false; to before the loop;
then add alreadyLogged = false; after the if(alreadyLogged) statement
I think it has to do with scope and timing. You are basically checking the bool value of a var that has not changed yet, because your promises have not resolved at the point of if(alreadyLogged).
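A minimal illustration of that ordering (somePromise here is hypothetical):
var alreadyLogged = false;
somePromise.then(function() {
    alreadyLogged = true; // runs on a later tick, after the synchronous code below
});
console.log(alreadyLogged); // still false: the check happens before the promise resolves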
Might I suggest a different approach?
Why not make use of Array.indexOf()?
Let's say you have two arrays to compare, arrA & arrB; you can see if an item exists like so:
var index = arrA.indexOf(arrB[0]);
if (index == -1) {
    console.log('No Match');
}
else {
    console.log('Match found');
}
no need for any preset flags to see if one array contains an element.
Hope it helps.
A bit more context:
var index = loggedOrderArray.indexOf(trackedOrderNum);
if (index == -1) {
    console.log('No Match');
    // -1 basically means there is no instance of trackedOrderNum in loggedOrderArray
}
else {
    console.log('Match found');
}
What you are attempting appears to be reasonably simple. You are just overwhelming yourself with awkward flow control and bulky code.
As it stands, the asynchronous flow isn't quite right, chiefly because parseString() is not promisified. A value returned from a raw nodeback won't propagate down a .then chain.
In addition, the asynchronous flow will improve with:
application of Promise.all() up front to aggregate the two essential data-delivering promises, csvShipPromise and findPromise;
the realisation that wholly synchronous steps in a promise chain can be merged with the next step.
And, the bulk of the synchronous code will reduce by employing several Array methods:
Array.prototype.filter()
Array.prototype.map()
Array.prototype.includes()
Boiling it down to somewhere near the bare minimum, I get the following router.get() expression:
router.get('/', function(req, res, next) {
    return Promise.all([csvShipPromise, findPromise])
    .then(([csvOrders, findData]) => { // destructuring (note the parentheses around the array pattern)
        let loggedOrderArr = csvOrders.map(order => order.OrderNumber);
        let foundData = replaceAll(findData, '&lt;', '<');
        foundData = replaceAll(foundData, '&gt;', '>');
        return new Promise((resolve, reject) => { // promisify parseString() on the fly
            parseString(foundData, (err, result) => {
                if(err) reject(err);
                else resolve(result['soap:Envelope']['soap:Body'][0].FindOrders[0].orders[0].order); // does this expression really return `orders` (plural)?
            });
        })
        .then(orders => {
            let trackArray = orders.filter(postObj => postObj.TrackingNumber[0].length > 1); // filter orders to eliminate those without a tracking number
            let loggedOrders = trackArray.filter(trackedItm => loggedOrderArr.includes(trackedItm.ReferenceNum));
            // let unloggedOrders = trackArray.filter(trackedItm => !loggedOrderArr.includes(trackedItm.ReferenceNum));
            res.send(loggedOrders); // or res.send(unloggedOrders), depending on what you want
        });
    })
    .catch(err => {
        console.log(err);
        res.error(err); // or similar
    });
});
untested - I may have made mistakes, though hopefully ones that are simple to correct
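As an aside, rather than promisifying parseString() by hand at each call site, Node 8+ can wrap it once with util.promisify; this sketch assumes parseString follows the standard (err, result) nodeback convention:
const { promisify } = require('util');
const parseStringAsync = promisify(parseString);

parseStringAsync(foundData).then(result => {
    // use the parsed result here
});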

MongoDB returning a null, but query works separately

In a post function, I am trying to retrieve the nth activity of a user (since I have a dropdown that returns the index number of the activity). When I run the query
collection.find({'local.email': req.user.local.email},
    {'local.activities': {$slice: [currActivity, 1]}});
I receive the correct activity object in Robo3T.
But when I call the same query in Node inside a post function, it returns undefined.
app.post('/addlog', function(req, res){
    var currActivity = req.body.curAct;
    var score = req.body.score;
    var comment = req.body.reason;
    mongoose.connect('mongodb://****:****@ds044907.mlab.com:44907/intraspect', function (err, database) {
        if (err)
            throw err
        else
        {
            db = database;
            var collection = db.collection('users');
            var retrievedAct = collection.find({'local.email': req.user.local.email},
                {'local.activities': {$slice: [currActivity, 1]}}).toArray().then(console.log(retrievedAct));
            if (retrievedAct.length > 0) { printjson(retrievedAct[0]); }
            console.log(currActivity);
            console.log(retrievedAct[0]);
            // console.log(req.body.newAct);
            collection.update({'local.activities.name': retrievedAct[0]},
                {$push: {'local.activities.log': {
                    comments: comment,
                    score: score,
                    log_time: Date.now()
                }}})
            .then(function(){
                res.redirect('/homepage');
            })
            .catch(function() {
                console.log('Error');
            });
        }
    });
});
I checked that the currActivity variable does in fact contain the integer value for the nth activity.
If you want the result of collection.find().toArray(), as specified in the docs, you have two options:
Passing a callback to .toArray(), like you did with mongoose.connect()
Using the Promise that it returns if you don't pass a callback
Right now you are doing neither: .toArray().then(console.log(retrievedAct)) calls console.log immediately (with a still-undefined variable) instead of passing a function to .then.
Also, you are mixing callback style and Promises in your code. I recommend you unify your code. If you are using Node.js version 8 or newer, async/await could be nice; it makes things simpler.
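To make that concrete, here is a minimal sketch of the route rewritten with async/await, using the Promise that .toArray() returns when no callback is passed; it assumes the database connection is established once elsewhere and keeps the query and update shapes from the question:
app.post('/addlog', async function(req, res) {
    try {
        var currActivity = req.body.curAct;
        var collection = db.collection('users');
        // await the Promise from toArray() instead of reading the variable synchronously
        var retrievedAct = await collection.find(
            {'local.email': req.user.local.email},
            {'local.activities': {$slice: [currActivity, 1]}}
        ).toArray();
        console.log(retrievedAct[0]);
        await collection.update(
            {'local.activities.name': retrievedAct[0]},
            {$push: {'local.activities.log': {
                comments: req.body.reason,
                score: req.body.score,
                log_time: Date.now()
            }}}
        );
        res.redirect('/homepage');
    } catch (err) {
        console.log(err);
        res.status(500).send('Error');
    }
});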

How do I achieve a synchronous requirement using asynchronous NodeJS

I am adding user validation and data modification to a page in a node.js application.
In a synchronous universe, in a single function I would:
Look up the original record in the database
Look up the user in LDAP to see if they are the owner or an admin
Do the logic and write the record.
In an asynchronous universe that won't work. To solve it I've built a series of hand-off functions:
router.post('/writeRecord', jsonParser, function(req, res) {
    post = req.post;
    var smdb = new AWS.DynamoDB.DocumentClient();
    var params = { ... }
    smdb.query(params, function(err, data){
        if( err == null ) writeRecordStep2(post, data);
    });
});

function writeRecordStep2( ru, post, data ){
    var conn = new LDAP();
    conn.search(
        'ou=groups,o=amazon.com',
        { ... },
        function(err, resp){
            if( err == null ){
                writeRecordStep3( ru, post, data, ldap1 )
            }
        }
    );
}

function writeRecordStep3( ru, post, data ){
    var conn = new LDAP();
    conn.search(
        'ou=groups,o=amazon.com',
        { ... },
        function(err, resp){
            if( err == null ){
                writeRecordStep4( ru, post, data, ldap1, ldap2 )
            }
        }
    );
}

function writeRecordStep4( ru, post, data, ldap1, ldap2 ){
    // Do stuff with collected data
}
Additionally, because the LDAP and Dynamo logic are in their own source documents, these functions are scattered tragically around the code.
This strikes me as inefficient, as well as inelegant. I'm eager to find a more natural asynchronous pattern to achieve the same result.
Any promise library should sort your issue out. My preferred choice is bluebird. In summary, promises help you compose asynchronous operations without nesting callbacks.
If you haven't heard about bluebird, just use it. It converts all the functions of a module so that they return then-able promises. Simply put, it promisifies those functions.
Here is the mechanism:
Module1.someFunction() // do your job and finally pass the return object to the next call
.then()  // use the object returned from the first call, do your job and return the updated value
.then()  // and so on
.catch() // do your job when any error occurs
Hope you understand. Here is an example:
var readFile = Promise.promisify(require("fs").readFile);
readFile("myfile.js", "utf8").then(function(contents) {
    return eval(contents);
}).then(function(result) {
    console.log("The result of evaluating myfile.js", result);
}).catch(SyntaxError, function(e) {
    console.log("File had syntax error", e);
}).catch(function(e) {
    //Catch any other error
    console.log("Error reading file", e);
});
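bluebird can also promisify an entire module in one go with Promise.promisifyAll, which adds an "Async"-suffixed, promise-returning copy of each function:
var Promise = require("bluebird");
var fs = Promise.promisifyAll(require("fs"));

fs.readFileAsync("myfile.js", "utf8").then(function(contents) {
    console.log(contents);
});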
I could not tell from your pseudo-code exactly which async operations depend upon results from the other ones, and knowing that is key to the most efficient way to code a series of asynchronous operations. If two operations do not depend upon one another, they can run in parallel, which generally gets to an end result faster. I also can't tell exactly what data needs to be passed on to later parts of the async requests (too much pseudo-code and not enough real code to show us what you're really attempting to do).
So, without that level of detail, I'll show you two ways to approach this. The first runs each operation sequentially: run the first async operation, and when it's done, run the next one, accumulating all the results into an object that is passed along to the next link in the chain. This is general purpose, since all async operations have access to all the prior results.
This makes use of promises built into the AWS.DynamoDB interface and makes our own promise for conn.search() (though if I knew more about that interface, it may already have one).
Here's the sequential version:
// promisify the search method
const util = require('util');
LDAP.prototype.searchAsync = util.promisify(LDAP.prototype.search);

// utility function that does a search and adds the result to the object passed in
// returns a promise that resolves to the object
function ldapSearch(data, key) {
    var conn = new LDAP();
    return conn.searchAsync('ou=groups,o=amazon.com', { ... }).then(results => {
        // put our results onto the passed in object
        data[key] = results;
        // resolve with the original object (so we can collect data here in a promise chain)
        return data;
    });
}

router.post('/writeRecord', jsonParser, function(req, res) {
    let post = req.post;
    let smdb = new AWS.DynamoDB.DocumentClient();
    let params = { ... }

    // The latest AWS interface gets a promise with the .promise() method
    smdb.query(params).promise().then(dbresult => {
        return ldapSearch({post, dbresult}, "ldap1");
    }).then(result => {
        // result.dbresult
        // result.ldap1
        return ldapSearch(result, "ldap2")
    }).then(result => {
        // result.dbresult
        // result.ldap1
        // result.ldap2
        // doSomething with all the collected data here
    }).catch(err => {
        console.log(err);
        res.status(500).send("Internal Error");
    });
});
And here's a parallel version that runs all three async operations at once, waits for all three of them to be done, and then has all the results at once:
// if the three async operations you show can be done in parallel
// first promisify things
const util = require('util');
LDAP.prototype.searchAsync = util.promisify(LDAP.prototype.search);

function ldapSearch(params) {
    var conn = new LDAP();
    return conn.searchAsync('ou=groups,o=amazon.com', { ... });
}

router.post('/writeRecord', jsonParser, function(req, res) {
    let post = req.post;
    let smdb = new AWS.DynamoDB.DocumentClient();
    let params = { ... }

    Promise.all([
        ldapSearch(...),
        ldapSearch(...),
        smdb.query(params).promise()
    ]).then(([ldap1Result, ldap2Result, queryResult]) => {
        // process ldap1Result, ldap2Result and queryResult here
    }).catch(err => {
        console.log(err);
        res.status(500).send("Internal Error");
    });
});
Keep in mind that due to the pseudo-code nature of the code in your question, this is also pseudo-code where implementation details (exactly what parameters you're searching for, what response you're sending, etc...) have to be filled in. This should be illustrative of promise chaining to serialize operations and the use of Promise.all() for parallelizing operations and promisifying a method that didn't have promises built in.

Mongoose promise built in but not working?

Or quite possibly I am doing it wrong; in fact, more than likely I am doing it wrong.
I have a table which contains a "tree" of skills, starting at the root level and possibly as deep as ten levels (only two so far), but I want to return it as one big fat JSON structure, so I want to ask the database for each set of data, build my structure, then ask for the next level.
Of course if I just send off my requests using mongoose, they will come back at any time, as they are all nice asynchronous calls. Normally a good thing.
Looking at the documentation for Mongoose (using 4.1.1), it seems like it has promises built in, but whenever I try to use one the API call throws a hissy fit and I get a 500 back.
Here is my simple function:
exports.getSkills = function(req, res) {
    console.log("Will return tree of all skills");
    for (var i = 0; i < 10; i++){
        var returnData = [];
        console.log("Lets get level " + i);
        var query = Skill.find({level: i}); //The query function
        var promise = query.exec; //The promise?
        promise.then(function(doc) { //Totally blows up at this point
            console.log("Something came back")
            return "OK";
        });
    }
}
The Mongoose documentation on the subject can be found here
http://mongoosejs.com/docs/api.html#promise_Promise
var promise = query.exec;
// =>
var promise = query.exec()
query.exec without parentheses is just a reference to the function, not a promise, so calling .then on it blows up; invoking it actually runs the query and returns the promise.
exports.getSkills = function(req, res) {
    console.log("Will return tree of all skills");
    var p = Promise.resolve();
    for (let i = 0; i < 10; i++) { // let captures each iteration's i for the callbacks below
        p = p.then(function () {   // reassign p so the queries actually chain
            return Skill.find({level: i}).exec();
        }).then(function (data) {
            //deal with your data
        });
    }
    p.then(function () {
        // deal with response
    });
}
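On Node 8+, the same sequential fetch reads more naturally with async/await; a sketch, assuming the handler may be declared async:
exports.getSkills = async function(req, res) {
    var tree = [];
    for (let i = 0; i < 10; i++) {
        // each level is fetched only after the previous one has resolved
        tree.push(await Skill.find({level: i}).exec());
    }
    res.json(tree);
};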

convert mongoose stream to array

I have worked with mongodb but I am quite new to the mongoose ORM. I was trying to fetch data from a collection; the explain() output showed 50ms, but the overall time it took to fetch the data via mongoose was 9 seconds. Here is the query:
Node.find({'dataset': datasetRef}, function (err, nodes){
    // handle error and data here
});
Then I applied an index on the field I was querying on. The explain() output now showed 4ms, but the total time to retrieve data via mongoose did not change. Then I searched a bit and found that using lean() can bring the read performance of mongoose quite close to native mongodb.
So I changed my query to:
Node.find({'dataset': datasetRef})
    .lean()
    .stream({transform: JSON.stringify})
    .pipe(res)
This solved the performance issues completely. But the end result is a stream of JSON docs like this:
{var11: val11, var12: val12}{var21: val21, var22: val22} ...
How do I parse this to form an array of docs? Or should I not be using a stream at all? In my opinion, there is no point using a stream if I am planning to form the array at the backend, since I will then have to wait for all the docs to be read into memory. But I also think that parsing and creating the whole array at the front end might be costly.
How can I achieve the best performance in this case without clogging the network as well?
UPDATE
I am trying to solve this problem using a through stream. However, I am not able to insert commas in between the JSON objects yet. See the code below:
res.write("[");
var through = require('through');
var tr = through(
function write(data){
this.queue(data.replace(/\}\{/g,"},{"));
}
);
var dbStream = db.node.find({'dataset': dataSetRef})
.lean()
.stream({'transform': JSON.stringify});
dbStream.on("end", function(){
res.write("]");
});
dbStream
.pipe(tr)
.pipe(res);
With this, I am able to get the "[" at the beginning and "]" at the end. However, I am still not able to get the pattern "}{" replaced with "},{". Not sure what I am doing wrong.
UPDATE 2
Now I have figured out why the replace is not working. Since I have specified the transform function as JSON.stringify, it reads one JSON object at a time and hence never encounters the pattern }{, because it never sees multiple JSON elements at once.
Now I have modified my code and written a custom transform function which does JSON.stringify and then appends a comma at the end. The only problem I am facing here is that I don't know when it is the last JSON object in the stream, because I don't want to append the comma in that case. At the moment, I append an empty JSON object once the end is encountered, but somehow this does not look like a convincing idea. Here is the code:
res.write("[");
function transform(data){
return JSON.stringify(data) + ",";
}
var dbStream = db.node.find({'dataset': dataSetRef})
.lean()
.stream({'transform': transform});
dbStream.on("end", function(){
res.write("{}]");
});
dbStream
.pipe(res);
The only problem I am facing here is that I don't know when it is the last JSON object in the stream.
But you do know which one is first. Knowing that, instead of appending the comma, you can prepend it to every object except the first one. In order to do that, set up your transform function inside a closure:
function transformFn(){
    var first = true;
    return function(data) {
        if (first) {
            first = false;
            return JSON.stringify(data);
        }
        return "," + JSON.stringify(data);
    }
}
Now you can just call that function and set it as your actual transform.
var transform = transformFn();
res.write("[");

var dbStream = db.node.find({'dataset': dataSetRef})
    .lean()
    .stream({'transform': transform});

dbStream.on("end", function(){
    res.write("]");
});

dbStream
    .pipe(res);
@cdbajorin and @rckd both gave correct answers.
However, repeating this code all the time seems like a pain.
Hence my solution uses an extra Transform stream to achieve the same thing.
import { Transform } from 'stream'

class ArrayTransform extends Transform {
    constructor(options) {
        super(options)
        this._index = 0
    }

    _transform(data, encoding, done) {
        if (!(this._index++)) {
            // first element, add opening bracket
            this.push('[')
        } else {
            // following element, prepend comma
            this.push(',')
        }
        this.push(data)
        done()
    }

    _flush(done) {
        if (!(this._index++)) {
            // empty
            this.push('[]')
        } else {
            // append closing bracket
            this.push(']')
        }
        done()
    }
}
Which in turn can be used as:
const toArray = new ArrayTransform();
Model.find(query).lean().stream({transform: JSON.stringify})
    .pipe(toArray)
    .pipe(res)
EDIT: added check for empty
I love @cdbajorin's solution, so I created a more readable version of it (ES6):
Products
    .find({})
    .lean()
    .stream({
        transform: (() => {
            let index = 0;
            return (data) => {
                return (!(index++) ? '[' : ',') + JSON.stringify(data);
            };
        })() // invoke (the arrow function must be parenthesized to be called immediately)
    })
    .on('end', () => {
        res.write(']');
    })
    .pipe(res);
var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/shoppingdb');

var Sports = mongoose.model('sports', {});
var result = [];
var prefix_out = "your info";

Sports.find({"goods_category": "parts"}).
    cursor().
    on("data", function(doc){
        //stream ---> string
        var str = JSON.stringify(doc);
        //string ---> JSON
        var json = JSON.parse(str);
        //handle your property
        json.handleYourProperty = prefix_out + json.imageURL;
        result.push(json); // push the document, not the array itself
    }).
    on('error', function(err){
        console.log(err);
    }).
    on('close', function(){
        console.log(result);
    });
