stack size exceeded when passing objects as metadata in winston - node.js

In winston, when I tried logging a mongoose query result as a metadata argument, winston spat out something like a thousand lines of log before the task quit.
So for a log like this:
School.findById(bus.schoolid, function(err, school) {
  winston.info('loaded school', school);
});
Here's a small piece of what gets output:
return _next.apply(this, arguments);
}, remove=function wrappedPointCut() {
  var args = [].slice.call(arguments);
  var lastArg = args.pop();
  var fn;
  var originalStack = new Error().stack;
  var $results;
  if (lastArg && typeof lastArg !== 'function') {
    args.push(lastArg);
  } else {
    fn = lastArg;
  }
  var promise = new Promise.ES6(function(resolve, reject) {
    args.push(function(error) {
      if (error) {
        // gh-2633: since VersionError is very generic, take the
        // stack trace of the original save() function call rather
        // than the async trace
        if (error instanceof VersionError) {
          error.stack = originalStack;
        }
        _this.$__handleReject(error);
        reject(error);
        return;
      }
      // There may be multiple results and promise libs other than
      // mpromise don't support passing multiple values to `resolve()`
      $results = Array.prototype.slice.call(arguments, 1);
      resolve.apply(promise, $results);
    });
    _this[newName].apply(_this, args);
  });
  if (fn) {
    if (_this.constructor.$wrapCallback) {
      fn = _this.constructor.$wrapCallback(fn);
    }
    return promise.then(
      function() {
        process.nextTick(function() {
          fn.apply(null, [null].concat($results));
        });
      },
      function(error) {
        process.nextTick(function() {
          fn(error);
        });
      });
  }
  return promise;
}
So I wanted to know a few things:
Why does passing a mongoose query result, which is supposed to be just a small JSON object, print such gibberish?
Will this happen for other objects too, like the err objects in callbacks, etc.?
How do I prevent this? Checking each and every log statement to ensure no query results are passed is not very practical.
Thanks in advance.
Update:
Issues #862, #474 and #914 are tracking/related to this problem, but there hasn't been much progress.

This issue has been fixed in pull request #977 of winston. You can check out the details on the PR page.
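Until you are on a version with that fix, the practical workaround is to never hand a full mongoose document to the logger. A mongoose document is not a small plain JSON object; it carries references to its model, schema, and connection, and winston's metadata serialization tries to walk that whole graph, which is why the output is full of mongoose internals. A minimal sketch of the workaround, reusing the query from above (toObject() is the standard mongoose call that strips a document down to a plain object):
School.findById(bus.schoolid, function(err, school) {
  if (err) {
    // log only the parts of the error you need, not the whole object graph
    return winston.error('failed to load school', { message: err.message });
  }
  // toObject() returns a plain JS object without mongoose internals
  winston.info('loaded school', school.toObject());
});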

Related

Node / Mongoose: res.send() erases previous Mongo update

I'm updating an object in an array with Mongoose. After it updates, I'm firing res.send() in the callback.
If I don't fire res.send(), the update saves fine. But when I do, the entire object in the array is erased from Mongo.
landmarkSchema.findById(tileRes.worldID, function(err, lm) {
  if (!lm) {
    console.log(err);
  }
  else if (req.user._id == lm.permissions.ownerID) {
    var min = tileRes.zooms[0];
    var max = tileRes.zooms.slice(-1)[0];
    if (lm.style.maps.localMapArray) {
      for (var i = 0; i < lm.style.maps.localMapArray.length; i++) { // better way to do this with mongo $set
        if (lm.style.maps.localMapArray[i].map_marker_viewID == req.body.map_marker_viewID) {
          lm.style.maps.localMapArray[i]['temp_upload_path'] = '';
          lm.style.maps.localMapArray[i]['localMapID'] = tileRes.mapURL;
          lm.style.maps.localMapArray[i]['localMapName'] = tileRes.worldID;
          lm.markModified('style.maps.localMapArray');
          lm.save(function(err, landmark) {
            if (err) {
              console.log('error');
            }
            else {
              console.log('updated map', landmark);
              //res.status(200).send(landmark);
            }
          });
        }
      }
    }
  }
});
Is it a write issue where Mongo doesn't finish writing before res.send is fired?
I recommend using async to iterate in these cases. Of course you can do this without it, but you'd need a very good reason to avoid it.
I don't see the rest of your method, but using async it should look similar to this:
async.each(lm.style.maps.localMapArray, function(localMap, callback) {
  // Do whatever you like here
  // ....
  // Call a method that requires a callback and pass the loop callback
  localMap.iUseACallbackAfterDoingMyStuff(callback);
}, function(err) {
  // now here the loop has ended
  if (err) {
    // Something happened..
  } else {
    res.status(200).send(somethingToTheClient);
  }
});
The code snippet is just for you to get the idea.
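Applied to the code in the question, a sketch could look like the following. It assumes the lm, tileRes, req, and res variables from the findById callback above; eachSeries is used instead of each so the repeated saves on the same document don't overlap, and only the final callback sends the response, so it runs after every save has finished:
async.eachSeries(lm.style.maps.localMapArray, function(localMap, callback) {
  if (localMap.map_marker_viewID != req.body.map_marker_viewID) {
    return callback(); // nothing to update for this entry
  }
  localMap.temp_upload_path = '';
  localMap.localMapID = tileRes.mapURL;
  localMap.localMapName = tileRes.worldID;
  lm.markModified('style.maps.localMapArray');
  lm.save(function(err) {
    callback(err); // move on only once the save has completed
  });
}, function(err) {
  if (err) {
    return res.status(500).send(err);
  }
  res.status(200).send(lm); // runs only after all saves are done
});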

New to NodeJS. Mongoose How to Async multiple queries?

OK, let's say I have two Models, Contract and CommLog. Both work fine independently, but I need many CommLogs to relate to each Contract.
In the ContractSchema I'm trying async:
ContractSchema.methods.getCommLog = function getCommLog() {
  var log = false;
  async.parallel([
    function() {
      CommLog.find({commType: 'contract', parent: this._id}, function(err, comms) {
        log = comms;
      });
    }],
    function() { return log; });
};
Where I'm trying to use it:
router.get('/:code', function(req, res, next) {
  Contract.findOne({accessCode: req.params.code}, function(err, contract) {
    if (err)
      res.send(err);
    var data;
    if (contract != null) {
      var comms = contract.getCommLog();
      data = {error: false, data: contract, commlog: comms};
    } else {
      data = {error: true, message: "No Contract"};
    }
    res.json(data);
  });
});
At var comms = contract.getCommLog(); it never returns anything, because getCommLog() executes asynchronously and hasn't finished by the time it returns...
I think it's my misunderstanding of mongoose querying, so if you understand what I am trying to accomplish, please let me know what I am doing wrong. I have tried it without async, and it always returned false.
The find call can return all matching results with one query, so I don't think you need async here. The reason data is not populated correctly when you call res.json(data) is that you are not waiting for the method call to finish before you fire off your server response. You would be better off nesting an additional CommLogs.find call within the Contract.find call, and only sending your response once that finishes.
// pseudo code:
Contract.find({}, function(err, contract) {
  if (err || !contract) {
    // return error response
  }
  else {
    CommLogs.find({contract: contract._id}, function(err, commlogs) {
      if (err || !commlogs) {
        // return error response 2
      }
      else {
        res.json({errors: false, contract: contract, commlogs: commlogs});
      }
    });
  }
});
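If you'd rather keep the schema method, a minimal sketch is to give it a callback instead of a return value (same models as in the question; the route then waits for the callback before responding):
ContractSchema.methods.getCommLog = function (done) {
  // pass the query results straight through to the caller's callback
  CommLog.find({commType: 'contract', parent: this._id}, done);
};

// in the route:
contract.getCommLog(function (err, comms) {
  if (err) return res.send(err);
  res.json({error: false, data: contract, commlog: comms});
});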

WebRTC getStats() API Set Up

I am trying to use getStats() from WebRTC's API to see if it provides any useful info for measuring latency and other video streaming data. The problem is that there's not much info about how to use it.
Even older examples are pretty rare, and the API has changed since then.
For example, my set up:
peerconnection.getStats(function(stats) {
  console.log(stats);
});
This returns an RTCStatsResponse object with 2 functions:
RTCStatsResponse {result: function, namedItem: function}
Calling the result() function returns an array of RTCStatsReport objects, with type 'googLibjingleSession' for the 1st object and type 'googTrack' for the 2nd. The other function, namedItem, is undefined when I try to call it.
[RTCStatsReport, RTCStatsReport]
From what little info is available (https://groups.google.com/forum/#!topic/discuss-webrtc/fpr4yn4-3sg), I should be getting a lot more RTCStatsReport objects with more useful info than I am currently getting.
Does anyone have experience using WebRTC's getStats()? I believe I may not be doing this correctly.
The following solution works for me.
Creating the peer connection:
pc = new RTCPeerConnection(pc_config, pc_constraints);
Adding the onaddstream handler:
pc.onaddstream = onRemoteStreamAdded;
The handler itself:
var onRemoteStreamAdded = function(event) {
  attachMediaStream(remoteVideo, event.stream);
  remoteStream = event.stream;
  getStats(pc);
};
Pay attention to the getStats function called from the handler; it is the following:
function getStats(peer) {
  myGetStats(peer, function (results) {
    for (var i = 0; i < results.length; ++i) {
      var res = results[i];
      console.log(res);
    }
    setTimeout(function () {
      getStats(peer);
    }, 1000);
  });
}
The myGetStats function is a wrapper that makes the call work uniformly across different browsers:
function myGetStats(peer, callback) {
  if (!!navigator.mozGetUserMedia) {
    peer.getStats(
      function (res) {
        var items = [];
        res.forEach(function (result) {
          items.push(result);
        });
        callback(items);
      },
      callback
    );
  } else {
    peer.getStats(function (res) {
      var items = [];
      res.result().forEach(function (result) {
        var item = {};
        result.names().forEach(function (name) {
          item[name] = result.stat(name);
        });
        item.id = result.id;
        item.type = result.type;
        item.timestamp = result.timestamp;
        items.push(item);
      });
      callback(items);
    });
  }
}
Every second it will fetch statistics and print the raw objects to the console log. You can inspect the log and then adapt the code to read the fields you need.
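For example, to pull a single field out of the normalized items, you can filter on the report type. This is just an illustrative sketch: googRtt is one of the legacy goog-prefixed stat names reported by Chrome, and the fields you actually get depend on the browser:
myGetStats(pc, function (items) {
  items.forEach(function (item) {
    // 'ssrc' reports describe individual media streams in the legacy stats
    if (item.type === 'ssrc' && item.googRtt !== undefined) {
      console.log('round-trip time for', item.id, ':', item.googRtt, 'ms');
    }
  });
});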

Trying to make my own RxJs observable

I'm trying to convert an existing API to work with RxJS... fairly new to Node, and very new to RxJS, so please bear with me.
I have an existing API (getNextMessage) that either blocks (asynchronously) or returns a new item or error via a node-style (err, val) callback when something becomes available.
so it looks something like:
getNextMessage(nodeStyleCompletionCallback);
You could think of getNextMessage like an HTTP request that completes in the future, when the server responds, but you do need to call getNextMessage again once a message is received, to keep getting new items from the server.
So, in order to make it into an observable collection, I have to get RxJS to keep calling my getNextMessage function until the subscriber is disposed.
Basically, I'm trying to create my own RxJS observable collection.
The problems are:
I don't know how to make subscriber.dispose() kill the async.forever
I probably shouldn't be using async.forever in the first place
I'm not sure I should even be getting 'completed' for each message - shouldn't that come at the end of a sequence?
I'd like to eventually remove the need for using fromNodeCallback, to have a first-class RxJS observable
Clearly I'm a little confused.
Would love a bit of help, thanks!
Here is my existing code:
var Rx = require('rx');
var port = require('../lib/port');
var async = require('async');

function observableReceive(portName)
{
  var observerCallback;
  var listenPort = new port(portName);
  var disposed = false;
  var asyncReceive = function(asyncCallback)
  {
    listenPort.getNextMessage(
      function(error, json)
      {
        observerCallback(error, json);
        if (!disposed)
          setImmediate(asyncCallback);
      }
    );
  }
  return function(outerCallback)
  {
    observerCallback = outerCallback;
    async.forever(asyncReceive);
  }
}

var receive = Rx.Observable.fromNodeCallback(observableReceive('rxtest'));
var source = receive();
var subscription = source.forEach(
  function (json)
  {
    console.log('receive completed: ' + JSON.stringify(json));
  },
  function (error) {
    console.log("receive failed: " + error.toString());
  },
  function () {
    console.log('Completed');
    subscription.dispose();
  }
);
So here's probably what I would do.
var Rx = require('rx');
// This is just for kicks. You have your own getNextMessage to use. ;)
var getNextMessage = (function(){
  var i = 1;
  return function (callback) {
    setTimeout(function () {
      if (i > 10) {
        callback("lawdy lawd it's ova' ten, ya'll.");
      } else {
        callback(undefined, i++);
      }
    }, 5);
  };
}());
// This just makes an observable version of getNextMessage.
var nextMessageAsObservable = Rx.Observable.create(function (o) {
  getNextMessage(function (err, val) {
    if (err) {
      o.onError(err);
    } else {
      o.onNext(val);
      o.onCompleted();
    }
  });
});
// This repeats the call to getNextMessage as many times (11) as you want.
// "take" will cancel the subscription after receiving 11 items.
nextMessageAsObservable
  .repeat()
  .take(11)
  .subscribe(
    function (x) { console.log('next', x); },
    function (err) { console.log('error', err); },
    function () { console.log('done'); }
  );
I realize this is over a year old, but I think a better solution for this would be to make use of recursive scheduling instead:
Rx.Observable.forever = function(next, scheduler) {
  scheduler = scheduler || Rx.Scheduler.default;
  // Internally wrap the callback into an observable
  next = Rx.Observable.fromNodeCallback(next);
  return Rx.Observable.create(function(observer) {
    var disposable = new Rx.SingleAssignmentDisposable(),
        hasState = false;
    disposable.setDisposable(scheduler.scheduleRecursiveWithState(null,
      function(state, self) {
        hasState && observer.onNext(state);
        hasState = false;
        next().subscribe(function(x) {
          hasState = true;
          self(x);
        }, observer.onError.bind(observer));
      }));
    return disposable;
  });
};
The idea here is that you can schedule a new item once the previous one has completed. You call next(), which invokes the passed-in method, and when it returns a value you schedule the next item for invocation.
You can then use it like so:
Rx.Observable.forever(getNextMessage)
  .take(11)
  .subscribe(function(message) {
    console.log(message);
  });
See a working example here

Iterating over a mongodb cursor serially (waiting for callbacks before moving to next document)

Using mongoskin, I can do a query like this, which will return a cursor:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
  });
});
However, I'd like to call some async functions for each document, and only move on to the next item on the cursor after this has called back (similar to the eachSeries structure in the async.js module). E.g:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
    externalAsyncFunction(result, function(err) {
      // externalAsyncFunction completed - now want to move to next doc
    });
  });
});
How could I do this?
Thanks
UPDATE:
I don't want to use toArray(), as this is a large batch operation and the results might not fit in memory in one go.
A more modern approach that uses async/await:
const cursor = db.collection("foo").find({});
while (await cursor.hasNext()) {
  const doc = await cursor.next();
  // process doc here
}
Notes:
This may be even simpler to do when async iterators arrive.
You'll probably want to add try/catch for error checking.
The containing function should be async, or the code should be wrapped in (async function() { ... })() since it uses await; see the sketch after these notes.
If you want, add await new Promise(resolve => setTimeout(resolve, 1000)); (pause for 1 second) at the end of the while loop to show that it does process docs one after the other.
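Putting those notes together, a minimal sketch of the wrapped version (db is assumed to be an already-connected Db instance and processDoc your own async function):
(async function () {
  const cursor = db.collection("foo").find({});
  try {
    while (await cursor.hasNext()) {
      const doc = await cursor.next();
      await processDoc(doc); // the loop waits here before advancing the cursor
    }
  } catch (err) {
    console.error(err); // error checking, per the note above
  }
})();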
If you don't want to load all of the results into memory using toArray, you can iterate using the cursor with something like the following.
myCollection.find({}, function(err, resultCursor) {
  function processItem(err, item) {
    if (item === null) {
      return; // All done!
    }
    externalAsyncFunction(item, function(err) {
      resultCursor.nextObject(processItem);
    });
  }
  resultCursor.nextObject(processItem);
});
Since Node.js v10.3 you can use an async iterator:
const cursor = db.collection('foo').find({});
for await (const doc of cursor) {
  // do your thing
  // you can even use `await myAsyncOperation()` here
}
Jake Archibald wrote a great blog post about async iterators, which I came to know about after reading #user993683's answer.
This works with large datasets by using setImmediate:
var cursor = collection.find({filter...}).cursor();
cursor.nextObject(function fn(err, item) {
  if (err || !item) return;
  setImmediate(fnAction, item, arg1, arg2, function() {
    cursor.nextObject(fn);
  });
});

function fnAction(item, arg1, arg2, callback) {
  // Here you can do whatever you want to do with your item.
  return callback();
}
If someone is looking for a Promise way of doing this (as opposed to using callbacks of nextObject), here it is. I am using Node v4.2.2 and mongo driver v2.1.7. This is kind of an asyncSeries version of Cursor.forEach():
function forEachSeries(cursor, iterator) {
  return new Promise(function(resolve, reject) {
    var count = 0;
    function processDoc(doc) {
      if (doc != null) {
        count++;
        return iterator(doc).then(function() {
          return cursor.next().then(processDoc);
        });
      } else {
        resolve(count);
      }
    }
    cursor.next().then(processDoc);
  });
}
To use this, pass the cursor and an iterator that operates on each document asynchronously (like you would for Cursor.forEach). The iterator needs to return a promise, like most mongodb native driver functions do.
Say you want to update all documents in the collection test. This is how you would do it:
var theDb;
MongoClient.connect(dbUrl).then(function(db) {
  theDb = db; // save it, we'll need to close the connection when done.
  var cur = db.collection('test').find();
  return forEachSeries(cur, function(doc) { // this is the iterator
    return db.collection('test').updateOne(
      {_id: doc._id},
      {$set: {updated: true}} // or whatever else you need to change
    );
    // updateOne returns a promise, if not supplied a callback. Just return it.
  });
})
.then(function(count) {
  console.log("All Done. Processed", count, "records");
  theDb.close();
});
You can do something like this using the async lib. The key point here is to check if the current doc is null. If it is, it means you are finished.
async.series([
  function (cb) {
    cursor.each(function (err, doc) {
      if (err) {
        cb(err);
      } else if (doc === null) {
        cb();
      } else {
        console.log(doc);
        array.push(doc);
      }
    });
  }
], function (err) {
  callback(err, array);
});
You could use a Future:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.count(Meteor.bindEnvironment(function(err, count) {
    for (var i = 0; i < count; i++)
    {
      var itemFuture = new Future();
      resultCursor.nextObject(function(err, item) {
        itemFuture.return(item);
      });
      var item = itemFuture.wait();
      // do what you want with the item,
      // and continue with the loop if so
    }
  }));
});
You can get the results in an array and iterate over them with a recursive function, something like this.
myCollection.find({}).toArray(function (err, items) {
  var count = items.length;
  var fn = function () {
    externalAsyncFunction(items[count - 1], function () {
      count -= 1;
      if (count) fn();
    });
  };
  if (count) fn();
});
Edit:
This is only applicable for small datasets; for larger ones you should use cursors, as mentioned in the other answers.
A more modern approach that uses for await:
const cursor = db.collection("foo").find({});
for await (const doc of cursor) {
  // process doc here with await
  await processDoc(doc);
}
You could use simple setTimeouts. This is an example in TypeScript running on Node.js (I am using promises via the 'when' module, but it can be done without them as well):
import mongodb = require("mongodb");
var dbServer = new mongodb.Server('localhost', 27017, {auto_reconnect: true}, {});
var db = new mongodb.Db('myDb', dbServer);
var util = require('util');
var when = require('when'); // npm install when
var dbDefer = when.defer();
db.open(function() {
  console.log('db opened...');
  dbDefer.resolve(db);
});
dbDefer.promise.then(function(db : mongodb.Db) {
  db.collection('myCollection', function (error, dataCol) {
    if (error) {
      console.error(error); return;
    }
    var doneReading = when.defer();
    var processOneRecordAsync = function(record) : When.Promise {
      var result = when.defer();
      setTimeout(function() {
        // simulate a variable-length operation
        console.log(util.inspect(record));
        result.resolve('record processed');
      }, Math.random() * 5);
      return result.promise;
    }
    var runCursor = function (cursor : MongoCursor) {
      cursor.next(function(error : any, record : any) {
        if (error) {
          console.log('an error occurred: ' + error);
          return;
        }
        if (record) {
          processOneRecordAsync(record).then(function(r) {
            setTimeout(function() { runCursor(cursor) }, 1);
          });
        }
        else {
          // cursor is exhausted
          doneReading.resolve('done reading data.');
        }
      });
    }
    dataCol.find({}, function(error, cursor : MongoCursor) {
      if (!error) {
        setTimeout(function() { runCursor(cursor) }, 1);
      }
    });
    doneReading.promise.then(function(message : string) {
      // message = 'done reading data.'
      console.log(message);
    });
  });
});
