forEach using generators in Node.js

I'm using Koa.js framework and Mongoose.js module.
Normally to get a result from MongoDB I code like this:
var res = yield db.collection.findOne({id: 'my-id-here'}).exec();
But I need to execute this line for every element of an array named 'items'.
items.forEach(function(item) {
  var res = yield db.collection.findOne({id: item.id}).exec();
  console.log(res); // undefined
});
But this code doesn't run, because yield appears inside a regular callback rather than a generator. If I write this:
items.forEach(function *(item) {
  var res = yield db.collection.findOne({id: item.id}).exec();
  console.log(res); // undefined
});
I'm not getting the result in the res variable either. I tried the 'generator-foreach' module, but that didn't work here either.
I know this comes down to my limited knowledge of how generators work in Node.js, but can you guys help me find a way to do this?

You can yield arrays, so just map each item to its query promise and yield the resulting array:
var fetchedItems = yield items.map((item) => {
  return db.collection.findOne({id: item.id});
});

The accepted answer is wrong; there is no need to use a library, because an array is already an iterable.
This is an old question, but since it has no correct answer yet and it appears on the first page of Google results for the key terms "iterators and forEach", I will answer it:
There is no need for a helper to iterate over an array, since an array already conforms to the iterable API.
Inside your generator, just use "yield* array" (note the *).
The yield* expression is used to delegate to another generator or iterable object.
Example:
let arr = [2, 3, 4];

function* g2() {
  yield 1;
  yield* arr;
  yield 5;
}
var iterator = g2();
console.log(iterator.next()); // { value: 1, done: false }
console.log(iterator.next()); // { value: 2, done: false }
console.log(iterator.next()); // { value: 3, done: false }
console.log(iterator.next()); // { value: 4, done: false }
console.log(iterator.next()); // { value: 5, done: false }
console.log(iterator.next()); // { value: undefined, done: true }
For examples and in depth information visit:
https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Operators/yield*

Thanks guys, I've done this using the 'co' module.
var co = require('co');
items.forEach(co(function* (item) {
  var img = yield db.collection.findOne({id: item.id}).exec();
}));
EDIT: With the latest version of CO, you need co.wrap() for this to work.
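For anyone landing here, a minimal sketch of the co.wrap() form (assuming co 4.x and the same items/db as in the question; the variable names come from the question, not from co itself):
var co = require('co');

// co.wrap() returns a plain function that runs the generator and resolves
// to its return value, so it can be handed straight to forEach.
items.forEach(co.wrap(function* (item) {
  var img = yield db.collection.findOne({ id: item.id }).exec();
  console.log(img);
}));

// Note: forEach fires every lookup without waiting for it. To collect all
// results, yield the mapped promises from inside a single co-run generator:
co(function* () {
  var results = yield items.map(function (item) {
    return db.collection.findOne({ id: item.id }).exec();
  });
  console.log(results); // one result per item, in the original order
});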

Related

Best way to navigate through JSON in Node while validating the path

I'm trying to get some info out of an API call in Node.js, structured something like this JSON:
{
  "generated": "2019-11-04T09:34:11+00:00",
  "event": {
    "id": "19040956",
    "start_": "2019-11-16T11:30:00+00:00",
    "event_context": {
      "sport": {
        "id": "1",
        "name": "Soccer"
      }
    }
  }
}
I'm not sure that any of these fields will be present (the JSON could be incomplete).
Is there a better way to get the value of "name" in JSON.event.event_context.sport.name without an ugly if chain just to avoid errors like "cannot read property 'sport' of undefined"?
Currently, I'm doing
if (json.event && json.event.event_context && json.event.event_context.sport) {
  return json.event.event_context.sport.name;
}
Is there a better way?
Thank you!
What do you mean by "I'm not sure about the presence of none of these fields"?
I don't understand what you're trying to achieve.
It looks like there is also an interesting package that allows richer conditions when searching JSON:
https://www.npmjs.com/package/jspath
// Walk a dotted path ("a.b.c") through an object, returning null
// when any segment along the way is missing.
let getNested = (path, obj) => {
  return path.split(".").reduce(getPath, obj);
};

// Reducer step: `current` is the value reached so far, `key` the next segment.
let getPath = (current, key) => {
  return (current != null && current[key] !== undefined) ? current[key] : null;
};

let test = {
  "foo": "bar",
  "baz": { "one": 1, "two": ["to", "too", "two"] },
  "event": { "event_context": { "sport": { "name": "soccer" } } }
};

console.log(getNested("none", test));                           // null
console.log(getNested("baz.one", test));                        // 1
console.log(getNested("baz.two", test));                        // ["to", "too", "two"]
console.log(getNested("event.event_context.sport.name", test)); // "soccer"
You can use lodash's get to read a potentially deeply-nested value, and also specify a default in case it doesn't exist.
Example
const _ = require('lodash');

const my_object = {
  "generated": "2019-11-04T09:34:11+00:00",
  "event": {
    "id": "19040956",
    "start_": "2019-11-16T11:30:00+00:00",
    "event_context": {
      "sport": {
        "id": "1",
        "name": "Soccer"
      }
    }
  }
};

_.get(my_object, 'event.event_context.sport.name'); // "Soccer"
_.get(my_object, 'event.event_context.sport.nonExistentField', 'default val'); // "default val"
Article: https://medium.com/@appi2393/lodash-get-or-result-f409e73e018b
You can check by using a function that walks the object keys, like:
function checkProperty(checkObject, checkstring) {
  if (!checkstring)
    return false;
  var propertiesKeys = checkstring.split('.');
  // Use a plain loop here: a `return false` inside a forEach callback only
  // exits that callback, so the original version always reported true.
  for (var i = 0; i < propertiesKeys.length; i++) {
    var element = propertiesKeys[i];
    if (!checkObject || !checkObject.hasOwnProperty(element)) {
      return false;
    }
    checkObject = checkObject[element];
  }
  return true;
}
var objectToCheck = {
  "generated": "2019-11-04T09:34:11+00:00",
  "event": {
    "id": "19040956",
    "start_": "2019-11-16T11:30:00+00:00",
    "event_context": {
      "sport": {
        "id": "1",
        "name": "Soccer"
      }
    }
  }
};

if (checkProperty(objectToCheck, 'event.event_context.sport.name'))
  console.log('object to find is : ', objectToCheck.event.event_context.sport.name);
Yeah there are better ways!
For example, you could use lodash's get() method to reach a nested value.
var object = { 'a': [{ 'b': { 'c': 3 } }] };
_.get(object, 'a[0].b.c');
// => 3
But there is also a native solution: optional chaining, a recent addition to the ECMAScript world.
Currently (11.2019) only Babel can handle this.
Why do I like it? Look here!
// Still checks for errors and is much more readable.
const nameLength = db?.user?.name?.length;
What happens when db, user, or name is undefined or null? With the optional chaining operator, JavaScript initializes nameLength to undefined instead of throwing an error.
If you are using Babel as a compiler then you could use it now.
Related link: https://v8.dev/features/optional-chaining
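Applied to the JSON from the question, it would look something like this (a sketch only; json is assumed to be the parsed API response, and both ?. and ?? needed Babel plugins or a newer runtime as of late 2019):
// Resolves to "Soccer", or to undefined if any level is missing.
const sportName = json?.event?.event_context?.sport?.name;

// Combined with nullish coalescing you can also supply a default:
const nameOrDefault = json?.event?.event_context?.sport?.name ?? 'unknown';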

How do I get the result of class getters into JSON? [duplicate]

Take this object:
x = {
  "key1": "xxx",
  "key2": function(){ return this.key1 }
}
If I do this:
y = JSON.parse( JSON.stringify(x) );
Then y will be { "key1": "xxx" }. Is there anything one could do to transfer functions via stringify? Recreating an object with attached functions is possible with ye goode olde eval(), but what about packing them in the first place?
json-stringify-function is a similar post to this one.
A snippet discovered via that post may be useful to anyone stumbling across this answer. It works by making use of the replacer parameter in JSON.stringify and the reviver parameter in JSON.parse.
More specifically, when a value happens to be of type function, .toString() is called on it via the replacer. When it comes time to parse, eval() is performed via the reviver when a function is present in string form.
var JSONfn;
if (!JSONfn) {
  JSONfn = {};
}

(function () {
  JSONfn.stringify = function (obj) {
    return JSON.stringify(obj, function (key, value) {
      return (typeof value === 'function') ? value.toString() : value;
    });
  };

  JSONfn.parse = function (str) {
    return JSON.parse(str, function (key, value) {
      if (typeof value != 'string') return value;
      return (value.substring(0, 8) == 'function') ? eval('(' + value + ')') : value;
    });
  };
}());
Code Snippet taken from Vadim Kiryukhin's JSONfn.js or see documentation at Home Page
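A quick usage sketch of the helpers above, round-tripping the object from the original question:
var x = {
  key1: 'xxx',
  key2: function () { return this.key1; }
};

var str = JSONfn.stringify(x); // key2 is stored as its source text
var y = JSONfn.parse(str);     // key2 is revived via eval()

console.log(y.key2()); // "xxx"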
I've had a similar requirement lately. To be clear, the output looks like JSON but in fact is just javascript.
JSON.stringify works well in most cases, but "fails" with functions.
I got it working with a few tricks:
make use of replacer (2nd parameter of JSON.stringify())
use func.toString() to get the JS code for a function
remember which functions have been stringified and replace them directly in the result
And here's what it looks like:
// our source data
const source = {
  "aaa": 123,
  "bbb": function (c) {
    // do something
    return c + 1;
  }
};

// keep a list of serialized functions
const functions = [];

// json replacer - returns a placeholder for functions
const jsonReplacer = function (key, val) {
  if (typeof val === 'function') {
    functions.push(val.toString());
    return "{func_" + (functions.length - 1) + "}";
  }
  return val;
};

// regex replacer - replaces placeholders with functions
const funcReplacer = function (match, id) {
  return functions[id];
};

const result = JSON
  .stringify(source, jsonReplacer)             // generate json with placeholders
  .replace(/"\{func_(\d+)\}"/g, funcReplacer); // replace placeholders with functions

// show the result
document.body.innerText = result;
Important: Be careful about the placeholder format - make sure it's not too generic. If you change it, also change the regex as applicable.
Technically this is not JSON, and I can hardly imagine why you would want to do this, but try the following hack:
x.key2 = x.key2.toString();
JSON.stringify(x) //"{"key1":"xxx","key2":"function (){return this.key1}"}"
Of course the first line can be automated by iterating recursively over the object (sketched below). The reverse operation is harder: the function is only a string, eval will work, but you have to guess whether a given key contains stringified function code or not.
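A rough sketch of that automation (functionsToStrings is a hypothetical helper, not part of any library; it mutates the object in place):
// Recursively replace every function value with its source string.
function functionsToStrings(obj) {
  for (var key in obj) {
    if (typeof obj[key] === 'function') {
      obj[key] = obj[key].toString();
    } else if (obj[key] !== null && typeof obj[key] === 'object') {
      functionsToStrings(obj[key]);
    }
  }
  return obj;
}

JSON.stringify(functionsToStrings(x));
// '{"key1":"xxx","key2":"function(){return this.key1}"}'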
You can't pack functions since the data they close over is not visible to any serializer.
Even Mozilla's uneval cannot pack closures properly.
Your best bet, is to use a reviver and a replacer.
https://yuilibrary.com/yui/docs/json/json-freeze-thaw.html
The reviver function passed to JSON.parse is applied to all key:value pairs in the raw parsed object from the deepest keys to the highest level. In our case, this means that the name and discovered properties will be passed through the reviver, and then the object containing those keys will be passed through.
This is what I did https://gist.github.com/Lepozepo/3275d686bc56e4fb5d11d27ef330a8ed
function stringifyWithFunctions(object) {
  return JSON.stringify(object, (key, val) => {
    if (typeof val === 'function') {
      return `(${val})`; // make it a string, surround it by parenthesis to ensure we can revive it as an anonymous function
    }
    return val;
  });
}

function parseWithFunctions(obj) {
  return JSON.parse(obj, (k, v) => {
    if (typeof v === 'string' && v.indexOf('function') >= 0) {
      return eval(v);
    }
    return v;
  });
}
The naughty but effective way would be to simply:
Function.prototype.toJSON = function() { return this.toString(); }
Though your real problem (aside from modifying the prototype of Function) would be deserialization without the use of eval.
I have come up with this solution, which takes care of converting functions (no eval). All you have to do is put this code before you use the JSON methods. Usage is exactly the same, but right now it takes only one param (the value to convert to a JSON string), so if you pass the remaining replacer and space params, they will be ignored.
void function () {
  window.JSON = Object.create(JSON)

  JSON.stringify = function (obj) {
    return JSON.__proto__.stringify(obj, function (key, value) {
      if (typeof value === 'function') {
        return value.toString()
      }
      return value
    })
  }

  JSON.parse = function (obj) {
    return JSON.__proto__.parse(obj, function (key, value) {
      if (typeof value === 'string' && value.slice(0, 8) == 'function') {
        return Function('return ' + value)()
      }
      return value
    })
  }
}()

// YOUR CODE GOES BELOW HERE
x = {
  "key1": "xxx",
  "key2": function () { return this.key1 }
}

const y = JSON.parse(JSON.stringify(x))
console.log(y.key2())
It is entirely possible to create functions from a string without eval().
var obj = {
  a: function (a, b) {
    return a + b;
  }
};

var serialized = JSON.stringify(obj, function (k, v) {
  // special treatment for function types
  if (typeof v === "function")
    return v.toString(); // we save the function as a string
  return v;
});

/* output:
"{"a":"function (a,b){\n return a+b;\n }"}"
*/
Now some magic to turn the string back into a function:
var compileFunction = function (str) {
  // find parameters
  var pstart = str.indexOf('('), pend = str.indexOf(')');
  var params = str.substring(pstart + 1, pend);
  params = params.trim();
  // find function body
  var bstart = str.indexOf('{'), bend = str.lastIndexOf('}');
  var body = str.substring(bstart + 1, bend);
  return Function(params, body);
}
Now use JSON.parse with a reviver:
var revivedObj = JSON.parse(serialized, function (k, v) {
  // there is probably a better way to determine if a value is a function string
  if (typeof v === "string" && v.indexOf("function") !== -1)
    return compileFunction(v);
  return v;
});
//output:
revivedObj.a
function anonymous(a,b
/**/) {
return a+b;
}
revivedObj.a(1,2)
3
To my knowledge, there are no serialization libraries that persist functions - in any language. Serialization is what one does to preserve data. Compilation is what one does to preserve functions.
It seems that people landing here are dealing with structures that would be valid JSON if not for the fact that they contain functions. So how do we handle stringifying these structures?
I ran into the problem while writing a script to modify RequireJS configurations. This is how I did it. First, there's a bit of code earlier that makes sure that the placeholder used internally (">>>F<<<") does not show up as a value in the RequireJS configuration. Very unlikely to happen but better safe than sorry. The input configuration is read as a JavaScript Object, which may contain arrays, atomic values, other Objects and functions. It would be straightforwardly stringifiable as JSON if functions were not present. This configuration is the config object in the code that follows:
// Holds functions we encounter.
var functions = [];
var placeholder = ">>>F<<<";

// This handler just records a function object in `functions` and returns the
// placeholder as the value to insert into the JSON structure.
function handler(key, value) {
  if (value instanceof Function) {
    functions.push(value);
    return placeholder;
  }
  return value;
}

// We stringify, using our custom handler.
var pre = JSON.stringify(config, handler, 4);

// Then we replace the placeholders, in the order they were encountered, with
// the functions we've recorded.
var post = pre.replace(new RegExp('"' + placeholder + '"', 'g'),
                       functions.shift.bind(functions));
The post variable contains the final value. This code relies on the fact that the order in which handler is called is the same as the order of the various pieces of data in the final JSON. I've checked the ECMAScript 5th edition, which defines the stringification algorithm, and cannot find a case where there would be an ordering problem. If this algorithm were to change in a future edition, the fix would be to use unique placeholders for functions and use these to refer back to the functions, which would be stored in an associative array mapping unique placeholders to functions.
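For completeness, a sketch of that unique-placeholder variant (handlerUnique and functionsById are hypothetical names; config is the same configuration object as above):
var functionsById = {};
var counter = 0;

// Records each function under a unique placeholder instead of relying on order.
function handlerUnique(key, value) {
  if (value instanceof Function) {
    var id = '>>>F' + (counter++) + '<<<';
    functionsById[id] = value;
    return id;
  }
  return value;
}

var pre = JSON.stringify(config, handlerUnique, 4);

// Swap each quoted placeholder for the source of the function it refers to.
var post = pre.replace(/">>>F(\d+)<<<"/g, function (match, n) {
  return functionsById['>>>F' + n + '<<<'].toString();
});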

mocking the populate method using mockgoose for mongoose (MongoDB library for Node.js) returns null

Having trouble debugging an issue mockgoose has when populating a property with a field selection. The yads fork of Mockgoose (http://github.com/yads/Mockgoose) solved the bug that made the populate option work at all, but if you specify fields it returns null for the populated property. I tried looking through the source code and stepping through with the debugger, but I'm not sure where to look. I can see in the debugger that the populate option triggers a call to fetch the child element, and that call returns the right child result with the correct fields, but when the parent element finally comes back its reference to the child element is set to null.
The query:
Posts.findById(foo).populate('createdBy', {fname:1, lname:1});
Incorrectly returns a post with post.createdBy = null. Omitting the fields parameter of fname, lname somehow makes it work again, with post.createdBy returning the full object.
Following are some excerpts from the code - though I'm not sure those are the right places to look.
collections.js
this.find = function (conditions, options, callback) {
  var results;
  var models = db[name];
  if (!_.isEmpty(conditions)) {
    results = utils.findModelQuery(models, conditions);
  } else {
    results = utils.objectToArray(utils.cloneItems(models));
  }
  results = filter.applyOptions(options, results);
  if (results.name === 'MongoError') {
    callback(results);
  } else {
    var result = {
      toArray: function (callback) {
        callback(null, results);
      }
    };
    callback(null, result);
  }
};
util.js
function cloneItems(items) {
  var clones = {};
  for (var item in items) {
    clones[item] = cloneItem(items[item]);
  }
  return clones;
}

function cloneItem(item) {
  return _.cloneDeep(item, function (value) {
    // Do not clone items that are ObjectId objects as _.clone mangles them
    if (value instanceof ObjectId) {
      return new ObjectId(value.toString());
    }
  });
}
And here's a conversation about the issue
https://github.com/mccormicka/Mockgoose/pull/90

Is it possible to pass a model to a layout in Express?

I know that it is possible to pass a model to a view in express by doing something like this:
exports.locations = function (req, res) {
  Location.find(function (err, results) {
    res.render('locations', { title: 'Locations', locations: results });
  });
};
But is it possible to pass a model to my layout?
Assuming you have all (relevant) routes inside a single .js file, you could add a function like this:
function applyGlobals(pageModel) {
  pageModel.myGlobalThing = "I'm always available";
  pageModel.anotherGlobalThing = 8675309;
  return pageModel;
}

exports.locations = function (req, res) {
  Location.find(function (err, results) {
    res.render('locations', applyGlobals({ title: 'Locations', locations: results }));
  });
};
You could also create a more generalizable solution:
function Globalizer(baseContent) {
  var theFunc = function (specificContent) {
    var keys = Object.keys(baseContent);
    for (var i = 0; i < keys.length; i++) {
      // This lets the page content override global content by not
      // overwriting a key that already exists.
      if (!specificContent.hasOwnProperty(keys[i])) {
        specificContent[keys[i]] = baseContent[keys[i]];
      }
    }
    return specificContent;
  };
  return theFunc;
}

// And use it like so.
var applyGlobals = new Globalizer({ global1: 12, global2: 'otherthing' });
var pageVars = applyGlobals({ item1: 'fifteen', 'item2': 15, global2: 'override' });
console.log(require('util').inspect(pageVars));
Which would emit:
{ item1: 'fifteen',
item2: 15,
global2: 'override',
global1: 12 }
Similarly, you could use one of the various mixin, extend, or assign functions from libraries like lodash, underscore, etc. See the docs for lodash.assign(), which illustrate accomplishing the same sort of thing.
UPDATE: One more way of doing it.
You might want to check out Express's app.locals documentation as well; it might work well for you.
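For reference, a small sketch of the app.locals approach (assuming a standard Express app and the Location model from the question): anything you put on app.locals is visible to every view render, including the layout.
var express = require('express');
var app = express();

// Available in every template, including the layout.
app.locals.siteTitle = 'My Site';
app.locals.year = new Date().getFullYear();

app.get('/locations', function (req, res) {
  Location.find(function (err, results) {
    // Per-request values still go through render(); the layout sees both.
    res.render('locations', { title: 'Locations', locations: results });
  });
});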

Mongodb $where query always true with nodejs

When I query my database with a function passed in the "$where" clause in Node.js, it always returns all the documents in the db.
For example, if I do
var stream = timetables.find({$where: function() { return false; }}).stream();
it return me all the documents.
Instead, if I do
var stream = timetables.find({$where: 'function() { return false; }'}).stream();
the function is really executed, and this code doesn't return any document.
The problem is that if I convert my function to a string, the context's bindings are lost, and I need them for more complex queries. For example:
var n = 1;
var f = function() { return this.number == n; }
var stream = timetables.find({$where: f.toString()}).stream();
// error: n is not defined
Is this a normal behaviour? How can I solve my problem?
Please excuse me for my poor english!
First off, keep in mind that the $where operator should almost never be used for the reasons explained here (credit goes to @WiredPrairie).
Back to your issue, the approach you'd like to take won't work even in the mongodb shell (which explicitly allows naked js functions with the $where operator). The javascript code provided to the $where operator is executed on the mongo server and won't have access to the enclosing environment (the "context bindings").
> db.test.insert({a: 42})
> db.test.find({a: 42})
{ "_id" : ObjectId("5150433c73f604984a7dff91"), "a" : 42 }
> db.test.find({$where: function() { return this.a == 42 }}) // works
{ "_id" : ObjectId("5150433c73f604984a7dff91"), "a" : 42 }
> var local_var = 42
> db.test.find({$where: function() { return this.a == local_var }})
error: {
"$err" : "error on invocation of $where function:\nJS Error: ReferenceError: local_var is not defined nofile_b:1",
"code" : 10071
}
Moreover it looks like that the node.js native mongo driver behaves differently from the shell in that it doesn't automatically serialize a js function you provide in the query object and instead it likely drops the clause altogether. This will leave you with the equivalent of timetables.find({}) which will return all the documents in the collection.
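So from Node the function has to travel as a string, with any outside values baked into that string before the query is sent (a sketch against the timetables collection from the question):
var n = 1;

// The function body is shipped to the server as text, so `n` has to be
// interpolated here; the server never sees your local scope.
var stream = timetables.find({
  $where: 'function() { return this.number == ' + n + '; }'
}).stream();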
This one works for me: store the $where function as a string in a variable and concatenate your local variable into that string,
var local_var = 42;
var whereClause = "function() { return this.a == " + local_var + " }";
db.test.find({ $where: whereClause })
Store your query in a variable and use that variable in your find query. It works..... :D
The context will always be that of the mongo database, since the function is executed there. There is no way to share the context between the two instances. You have to rethink the way you query and come up with a different strategy.
You can use a wrapper to pass basic JSON objects, ie. (pardon coffee-script):
# That's the main wrapper.
wrap = (f, args...) ->
  "function() { return (#{f}).apply(this, #{JSON.stringify(args)}) }"

# Example 1
where1 = (flag) ->
  @myattr == 'foo' or flag

# Example 2 with different arguments
where2 = (foo, options = {}) ->
  if foo == options.bar or @_id % 2 == 0
    true
  else
    false

db.collection('coll1').count $where: wrap(where1, true), (err, count) ->
  console.log err, count

db.collection('coll1').count $where: wrap(where2, true, bar: true), (err, count) ->
  console.log err, count
Your functions are going to be passed as something like:
function () {
  return (function (flag) {
    return this.myattr === 'foo' || flag;
  }).apply(this, [true])
}
...and example 2:
function () {
  return (
    function (foo, options) {
      if (options == null) {
        options = {};
      }
      if (foo === options.bar || this._id % 2 === 0) {
        return true;
      } else {
        return false;
      }
    }
  ).apply(this, [ true, { "bar": true } ])
}
This is how it is supposed to be. The drivers don't translate the client code into the mongo function javascript code.
I'm assuming you are using Mongoose to query your database.
If you take a look at the actual Query object implementation, you'll find that only strings are valid arguments for the where prototype.
When using the where clause, you should use it along with the standard operators such as gt and lt, which operate on the path created by the where call.
Remember that Mongoose querying, as in Mongo, is by example; you may want to reconsider your query specification in a more descriptive fashion.
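For example, a sketch of the query-builder style this answer describes (Timetable is assumed to be the Mongoose model behind the timetables collection, and n the local variable from the question):
// where() takes a path as a string; comparison helpers chain after it.
Timetable.find()
  .where('number').equals(n)
  .exec(function (err, docs) {
    console.log(err, docs);
  });

// Range operators work the same way:
Timetable.find().where('number').gt(0).lt(10).exec(function (err, docs) {
  console.log(err, docs);
});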
