agenda scheduling defaulting nextRunAt to next year - node.js

I am trying to schedule jobs using the agenda library. I've done this:
// Start the Agenda job processor so that defined jobs can be picked up.
await this.agenda.start()
// Register the handler for this scheduler's job; lockLifetime caps how long
// a single run may hold the job lock (10 seconds here).
this.agenda.define(scheduler.id, { lockLifetime: 10000 }, (job, done) => {
console.log('Hello world!')
//some logic using values on job variable
done()
})
// Schedule the recurring job. NOTE(review): nextRunAt is computed from the
// cron expression itself — the saved repeatInterval '* * * 1 *' means
// "every minute during January", which is why nextRunAt lands on Jan 1st of
// the next year; startDate does not override the cron schedule.
const result = await this.agenda.every(scheduler.cron, scheduler.id, scheduler, { startDate: new Date() })
await this.agenda.start() //i added start here too just in case
console.log(result)
That last console.log shows that the job was created successfully, and it even saves to the database, but nextRunAt defaults to the 1st of January 2022 for some reason (as seen in the full object below). I tried adding startDate on the every() call, but it still shows 2022 and I have no idea why. No matter what cron expression I send, it still doesn't work. The only parameter I pass to the Agenda constructor is the database, nothing about when to start.
Thanks!
Job {
agenda: Agenda {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
_findAndLockNextJob: [Function: findAndLockNextJob],
_name: undefined,
_processEvery: 5000,
_defaultConcurrency: 5,
_maxConcurrency: 20,
_defaultLockLimit: 0,
_lockLimit: 0,
_definitions: { '61af764f8f26fb369c76b571': [Object] },
_runningJobs: [],
_lockedJobs: [],
_jobQueue: JobProcessingQueue { _queue: [] },
_defaultLockLifetime: 600000,
_sort: { nextRunAt: 1, priority: -1 },
_indices: { name: 1, nextRunAt: 1, priority: -1, lockedAt: 1, disabled: 1 },
_isLockingOnTheFly: false,
_isJobQueueFilling: Map(0) {},
_jobsToLock: [],
_ready: Promise { undefined },
_db: MongoClient {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
s: [Object],
topology: [Topology],
[Symbol(kCapture)]: false,
[Symbol(options)]: [Object: null prototype]
},
_mdb: Db { s: [Object] },
_collection: Collection { s: [Object] },
_processInterval: Timeout {
_idleTimeout: 5000,
_idlePrev: [TimersList],
_idleNext: [TimersList],
_idleStart: 28573,
_onTimeout: [Function: bound processJobs],
_timerArgs: undefined,
_repeat: 5000,
_destroyed: false,
[Symbol(refed)]: true,
[Symbol(kHasPrimitive)]: false,
[Symbol(asyncId)]: 221,
[Symbol(triggerId)]: 0
},
[Symbol(kCapture)]: false
},
attrs: {
name: '61af764f8f26fb369c76b571',
data: {
//stuff that i use here
},
priority: 0,
type: 'single',
nextRunAt: 2022-01-01T03:00:00.000Z, //next run at 2022
repeatInterval: '* * * 1 *',
repeatTimezone: null,
startDate: 2021-12-07T14:57:19.394Z,
endDate: null,
skipDays: null,
_id: new ObjectId("61af764fcb12102f92e637dc")
}
}

Using crontab I noticed that the problem was my cron expression, which was scheduled to run on the 1st day of the next month — nothing to do with the library. Now, with a correct cron expression, it's working as intended.

Related

mongodb unable to solve silly thing - having issue with mongodb updating the entry

I want to update the entry at mongodb database I am using atlas cluster.
While updating in local(Local mongodb) with code it works like charm!
But when I do it with the cluster it's not working. Yet when I try to update the cluster from the terminal it works well — not sure what's wrong.
here's my code
// Replace the DataDetails sub-document of the matching record.
// NOTE(review): collection.update() is deprecated in modern drivers — prefer
// updateOne(). The cluster response below shows n: 0 / nModified: 0, which
// indicates the _id filter matched no document; verify that _id is actually
// stored as an ObjectId (not a string) in the Atlas collection.
const savedData = await db.collection('data').update({ _id: mongo.ObjectID(_id) }, {
$set: {
DataDetails: {
AccountId: data.account_id.toString(),
UniqueId: data.data.unique_id.toString(),
DataId: data.data.id.toString(),
// createdAt: new Date(),
}
}
});
return savedData;
Debugging this, I have been logging savedData. The local database returns:
result: { n: 1, nModified: 1, ok: 1 },
connection: Connection {
_events: [Object: null prototype] {
error: [Function],
close: [Function],
timeout: [Function],
parseError: [Function]
},
_eventsCount: 4,
_maxListeners: undefined,
options: {
host: 'localhost',
port: 27017,
size: 5,
connectionTimeout: 30000,
socketTimeout: 360000,
keepAlive: true,
keepAliveInitialDelay: 300000,
noDelay: true,
but the cluster one returns
CommandResult {
result: {
n: 0,
nModified: 0,
opTime: { ts: [Timestamp], t: 65 },
electionId: 7fffffff0000000000000041,
ok: 1,
operationTime: Timestamp { _bsontype: 'Timestamp', low_: 1, high_: 1634236388 },
'$clusterTime': { clusterTime: [Timestamp], signature: [Object] }
},
connection: Connection {
_events: [Object: null prototype] {
error: [Function],
close: [Function],
timeout: [Function],
parseError: [Function]
},
_eventsCount: 4,
_maxListeners: undefined
Not sure why this is happening at first place, and not sure how to resolve this?
I've been stuck at this point for a long time.
Please help.

how to find database sessionids for each runstream method api call while executing query in gcloud spanner?

we are seeing database sessions more than the max connections configured.
database.runStream( queryOptions)
below was the session configuration :
// Spanner session-pool settings: keep between 1 and 10 sessions alive,
// pinging idle sessions every 10 minutes to prevent server-side eviction.
const SESSIONCONFIG = {
  min: 1,
  max: 10,
  keepAlive: 10,
};
How can we print sessionid using database.runStream api call to ensure same sessionid was reused?
created database connection, using session config options. session pool displayed with below details:
'
pool_:
SessionPool {
_events:
[Object: null prototype] { error: [Function: bound emit], close: [Function] },
_eventsCount: 2,
_maxListeners: undefined,
isOpen: true,
database: [Circular],
options:
{ acquireTimeout: Infinity,
concurrency: Infinity,
fail: false,
idlesAfter: 10,
keepAlive: '1',
labels: [Object],
max: '5',
maxIdle: 1,
min: '1',
writes: 0 },
_myDatabaseConnection: { readonly: [Array], readwrite: [], borrowed: Set {} },
_requests:
PQueue {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
_carryoverConcurrencyCount: false,
_isIntervalIgnored: true,
_intervalCount: 1,
_intervalCap: Infinity,
_interval: 0,
_intervalId: null,
_intervalEnd: 0,
_timeoutId: null,
queue: [PriorityQueue],
_queueClass: [Function: PriorityQueue],
_pendingCount: 0,
_concurrency: Infinity,
_isPaused: false,
_resolveEmpty: [Function],
_resolveIdle: [Function] },
_acquires:
PQueue {
_events: [Object: null prototype] {},
_eventsCount: 0,
_maxListeners: undefined,
_carryoverConcurrencyCount: false,
_isIntervalIgnored: true,
_intervalCount: 1,
_intervalCap: Infinity,
_interval: 0,
_intervalId: null,
_intervalEnd: 0,
_timeoutId: null,
queue: [PriorityQueue],
_queueClass: [Function: PriorityQueue],
_pendingCount: 0,
_concurrency: 1,
_isPaused: false,
_resolveEmpty: [Function],
_resolveIdle: [Function] },
_traces: Map {},
_onClose: Promise { <pending> },
_evictHandle:
Timeout {
_called: false,
_idleTimeout: 600000,
_idlePrev: null,
_idleNext: null,
_idleStart: 1644,
_onTimeout: [Function],
_timerArgs: undefined,
_repeat: 600000,
_destroyed: false,
_handle: [Timer],
[Symbol(unrefed)]: false,
[Symbol(asyncId)]: 76,
[Symbol(triggerId)]: 1 },
_pingHandle:
Timeout {
_called: false,
_idleTimeout: 60000,
_idlePrev: null,
_idleNext: null,
_idleStart: 1644,
_onTimeout: [Function],
_timerArgs: undefined,
_repeat: 60000,
_destroyed: false,
_handle: [Timer],
[Symbol(unrefed)]: false,
[Symbol(asyncId)]: 78,
[Symbol(triggerId)]: 1 } } }'
return new Promise((resolve, reject) => {
database.runStream(queryOptions)
.on('error', (error) => {
// how to ensure same session id object being used
logger.error( error);
reject(error);
})
.on('data', (row) => {
allData.push(row.toJSON());
})
.on('end', () => {
if (allData.length === 1) {
resolve(allData[0]);
}
});
});
Expected: return session to pool once query execution complete without closing.
Current: Every time a new session created and total number of sessions created exceeds maximum allocated.
I'm not yet sure whether it's possible to get the session ID used by the runStream query. But something you can try is to call Database.getSessions and print the IDs of all sessions. Then you can check at each call to runStream whether new sessions are added or if the total number of sessions exceeds the maximum that you've configured.
How are you checking that it's creating more than the configured max number of sessions?
Looking at the runStream implementation, for each query execution, it is taking a read session from the configured session pool. The pool will not create any more sessions after it's full.

How to read password protected PDF file in Nodejs and get it in buffer?

I tried using pdfjs-dist.
getting large json response.
var PDFJS=require('pdfjs-dist');
// Open the PDF; an empty password string works for documents encrypted with
// an empty user password.
PDFJS.getDocument({ url: 'p1.pdf', password: '' }).then(function(pdf_doc)
{
// pdf_doc is a PDFDocumentProxy (the large object dumped below), not the
// raw file bytes. NOTE(review): the proxy exposes a getData() method that
// resolves to a Uint8Array of the file — likely what the asker wants;
// confirm against the pdf.js API docs.
console.log(pdf_doc);
}).catch(function(error) {
// incorrect password
// error is an object having 3 properties : name, message & code
});
Response
This is the whole response I am getting.
but I need response in buffer.
Can it be converted to buffer.
PDFDocumentProxy {
loadingTask:
{ _capability:
{ resolve: [Function], reject: [Function], promise: [Promise] },
_transport:
WorkerTransport {
messageHandler: [Object],
loadingTask: [Circular],
commonObjs: [Object],
fontLoader: [GenericFontLoader],
_params: [Object],
CMapReaderFactory: [DOMCMapReaderFactory],
destroyed: false,
destroyCapability: null,
_passwordCapability: null,
_networkStream: [PDFNodeStream],
_fullReader: [PDFNodeStreamFsFullReader],
_lastProgress: [Object],
pageCache: [],
pagePromises: [],
downloadInfoCapability: [Object],
numPages: 4,
pdfDocument: [Circular] },
_worker:
{ name: null,
destroyed: false,
postMessageTransfers: true,
verbosity: 1,
_readyCapability: [Object],
_port: [LoopbackPort],
_webWorker: null,
_messageHandler: [Object] },
docId: 'd0',
destroyed: false,
onPassword: null,
onProgress: null,
onUnsupportedFeature: null },
_pdfInfo:
{ numPages: 4,
fingerprint: '3432353738363537336c6e665361446f6f744f4a70' },
_transport:
WorkerTransport {
messageHandler:
{ sourceName: 'd0',
targetName: 'd0_worker',
comObj: [LoopbackPort],
callbackId: 1,
streamId: 1,
postMessageTransfers: true,
streamSinks: [Object],
streamControllers: [Object: null prototype] {},
callbacksCapabilities: [Object: null prototype] {},
actionHandler: [Object],
_onComObjOnMessage: [Function] },
loadingTask:
{ _capability: [Object],
_transport: [Circular],
_worker: [Object],
docId: 'd0',
destroyed: false,
onPassword: null,
onProgress: null,
onUnsupportedFeature: null },
commonObjs: { objs: [Object: null prototype] {} },
fontLoader:
GenericFontLoader {
docId: 'd0',
nativeFontFaces: [],
styleElement: null,
loadingContext: [Object],
loadTestFontId: 0 },
_params:
[Object: null prototype] {
url: 'p1.pdf',
password: '',
rangeChunkSize: 65536,
CMapReaderFactory: [Function: DOMCMapReaderFactory],
ignoreErrors: true,
pdfBug: false,
nativeImageDecoderSupport: 'none',
maxImageSize: -1,
isEvalSupported: true,
disableFontFace: true,
disableRange: false,
disableStream: false,
disableAutoFetch: false,
disableCreateObjectURL: false },
CMapReaderFactory: DOMCMapReaderFactory { baseUrl: null, isCompressed: false },
destroyed: false,
destroyCapability: null,
_passwordCapability: null,
_networkStream:
PDFNodeStream {
source: [Object],
url: [Url],
isHttp: false,
isFsUrl: true,
httpHeaders: {},
_fullRequest: [PDFNodeStreamFsFullReader],
_rangeRequestReaders: [Array] },
_fullReader:
PDFNodeStreamFsFullReader {
_url: [Url],
_done: false,
_storedError: null,
onProgress: [Function],
_contentLength: 112979,
_loaded: 112979,
_filename: null,
_disableRange: false,
_rangeChunkSize: 65536,
_isStreamingSupported: true,
_isRangeSupported: true,
_readableStream: [ReadStream],
_readCapability: [Object],
_headersCapability: [Object] },
_lastProgress: { loaded: 112979, total: 112979 },
pageCache: [],
pagePromises: [],
downloadInfoCapability:
{ resolve: [Function], reject: [Function], promise: [Promise] },
numPages: 4,
pdfDocument: [Circular] } }
*ignore below text*
efwrg rgsretg resgerstgh;ergh ;resjgysregh regjes powrjgu oiuueryoeq uieqroeqreqrilih ehr oiyeroeq ioiyeqroeq oieyqrioeq oieqyr oiyeqr oiyeqrp ioqyet oiehr oiyerh oieyreq oiyheqri iohereqk ioheqr qerioyqereq ioehqriheq rioqehriqeb ioeqrhpeq ioeqrhiqe ioqehriq ioqerhioq oirhqeipor oiqehrieq ioehqrq ioeqhrieq iohqerpq ieqhrpeq ioeqhrpeq iheqrpqe oiehrpqe ieqhrqierh ioeqhr ieqhr ioeqrh piqerh ieqhr iheqr piheqr ioheqr iheqr ioeqhrp ioqhre oieqhr oeqiyr qoeiryf pouqer poqure pouqr pouqre[q poquerq poqeur[q poqeur poqwuer poquer[ poqwur[wq poqr[ poqwhr powrq pow
You may open and read a password protected PDF like below. Working with your existing code:
var PDFJS = require('pdfjs-dist');
// Open the (password-protected) PDF and extract the text of every page.
// BUG FIX: the original pushed into `text` from un-awaited getPage()/
// getTextContent() promises and never returned the array, so the collected
// text was silently lost. Awaiting each page and returning `text` makes the
// result available to the outer promise chain.
PDFJS.getDocument({ url: 'p1.pdf', password: '' }).then(async function (pdf) {
  const text = [];
  for (let i = 1; i <= pdf.numPages; i++) {
    const page = await pdf.getPage(i);
    const data = await page.getTextContent();
    for (let j = 0; j < data.items.length; j++) {
      text.push(data.items[j].str);
    }
  }
  return text; // resolves with the extracted text for downstream .then()
}).catch(function (error) {
  // incorrect password — error is an object with name, message & code.
  // Log it instead of swallowing silently so failures are visible.
  console.error(error);
});

NodeJS - find by Id not working

i am just starting with NodeJs, I followed a few tutorials but after I try to do it by myself I am struggling with some things.
I'm trying to edit a post saved into my Mlab (Mongo) database. But I can't reach my values even if I have a good response:
My server.js code
// GET /post/:id/edit — load a single post for editing.
app.get('/post/:id/edit', function(req,res){
console.log(req.params.id)
// NOTE(review): with a callback, find() hands back a Cursor (the Readable
// dump below), not the matching documents — read from it with .toArray(),
// or use findOne() for a single document, before rendering.
db.collection('posts').find({_id:ObjectId(req.params.id)},{},{}, function(err, result){
console.log(result)
if (err) return console.log(err)
res.render('edit.ejs', {post: result})
})
})
As a result I'm getting the below: (no text or title as expected)
Readable {
pool: null,
server: null,
disconnectHandler:
{ s: { storedOps: [], storeOptions: [Object], topology: [Object] },
length: [Getter] },
bson: {},
ns: 'analistafuncionalblog.posts',
cmd:
{ find: 'analistafuncionalblog.posts',
limit: 0,
skip: 0,
query: { _id: 5921bf9aff2e7524b4552480 },
readPreference: { preference: 'primary', tags: undefined, options: [Object] },
slaveOk: true,
fields: {} },
options:
{ readPreference: { preference: 'primary', tags: undefined, options: [Object] },
skip: 0,
limit: 0,
raw: undefined,
hint: null,
timeout: undefined,
slaveOk: true,
db:
EventEmitter {
domain: null,
_events: {},
_eventsCount: 0,
_maxListeners: undefined,
s: [Object],
serverConfig: [Getter],
bufferMaxEntries: [Getter],
databaseName: [Getter] },
promiseLibrary: [Function: Promise],
disconnectHandler: { s: [Object], length: [Getter] } },
topology:
EventEmitter {
domain: null,
_events:
{ reconnect: [Function],
reconnectFailed: [Function],
timeout: [Function],
error: [Object],
close: [Function],
destroy: [Function],
serverDescriptionChanged: [Function],
serverHeartbeatStarted: [Function],
serverHeartbeatSucceeded: [Function],
serverHeartbeatFailed: [Function],
serverOpening: [Function],
serverClosed: [Function],
topologyOpening: [Function],
topologyClosed: [Function],
topologyDescriptionChanged: [Function],
attemptReconnect: [Function],
monitoring: [Function] },
_eventsCount: 17,
_maxListeners: undefined,
id: 0,
s:
{ options: [Object],
logger: [Object],
Cursor: [Object],
bson: {},
pool: [Object],
disconnectHandler: [Object],
monitoring: true,
inTopology: false,
monitoringInterval: 5000,
topologyId: -1 },
ismaster:
{ hosts: [Object],
setName: 'rs-ds149221',
setVersion: 1,
ismaster: true,
secondary: false,
primary: 'ds149221-a.mlab.com:49221',
me: 'ds149221-a.mlab.com:49221',
electionId: 7fffffff0000000000000001,
maxBsonObjectSize: 16777216,
maxMessageSizeBytes: 48000000,
maxWriteBatchSize: 1000,
localTime: Sun May 21 2017 16:50:58 GMT-0300 (Argentina Standard Time),
maxWireVersion: 4,
minWireVersion: 0,
ok: 1 },
lastIsMasterMS: 168,
monitoringProcessId:
{ _called: false,
_idleTimeout: 5000,
_idlePrev: [Object],
_idleNext: [Object],
_idleStart: 17129,
_onTimeout: [Function],
_repeat: null },
initalConnect: false,
wireProtocolHandler: { legacyWireProtocol: {} },
_type: 'server',
clientInfo:
{ driver: [Object],
os: [Object],
platform: 'Node.js v4.4.7, LE, mongodb-core: 2.1.10' },
lastUpdateTime: 0,
lastWriteDate: 0,
staleness: 0 },
cursorState:
{ cursorId: null,
cmd:
{ find: 'analistafuncionalblog.posts',
limit: 0,
skip: 0,
query: [Object],
readPreference: [Object],
slaveOk: true,
fields: {} },
documents: [],
cursorIndex: 0,
dead: false,
killed: false,
init: false,
notified: false,
limit: 0,
skip: 0,
batchSize: 1000,
currentLimit: 0,
transforms: undefined },
logger: { className: 'Cursor' },
_readableState:
ReadableState {
objectMode: true,
highWaterMark: 16,
buffer: [],
length: 0,
pipes: null,
pipesCount: 0,
flowing: null,
ended: false,
endEmitted: false,
reading: false,
sync: true,
needReadable: false,
emittedReadable: false,
readableListening: false,
resumeScheduled: false,
defaultEncoding: 'utf8',
ranOut: false,
awaitDrain: 0,
readingMore: false,
decoder: null,
encoding: null },
readable: true,
domain: null,
_events: {},
_eventsCount: 0,
_maxListeners: undefined,
s:
{ numberOfRetries: 5,
tailableRetryInterval: 500,
currentNumberOfRetries: 5,
state: 0,
streamOptions: {},
bson: {},
ns: 'analistafuncionalblog.posts',
cmd:
{ find: 'analistafuncionalblog.posts',
limit: 0,
skip: 0,
query: [Object],
readPreference: [Object],
slaveOk: true,
fields: {} },
options:
{ readPreference: [Object],
skip: 0,
limit: 0,
raw: undefined,
hint: null,
timeout: undefined,
slaveOk: true,
db: [Object],
promiseLibrary: [Function: Promise],
disconnectHandler: [Object] },
topology:
EventEmitter {
domain: null,
_events: [Object],
_eventsCount: 17,
_maxListeners: undefined,
id: 0,
s: [Object],
ismaster: [Object],
lastIsMasterMS: 168,
monitoringProcessId: [Object],
initalConnect: false,
wireProtocolHandler: [Object],
_type: 'server',
clientInfo: [Object],
lastUpdateTime: 0,
lastWriteDate: 0,
staleness: 0 },
topologyOptions:
{ host: 'ds149221.mlab.com',
port: 49221,
disconnectHandler: [Object],
cursorFactory: [Object],
reconnect: true,
emitError: true,
size: 5,
socketOptions: {},
socketTimeout: 30000,
connectionTimeout: 30000,
clientInfo: [Object],
readPreference: [Object],
promiseLibrary: [Function: Promise],
bson: {} },
promiseLibrary: [Function: Promise],
currentDoc: null },
sortValue: undefined }
In my Mlab I keep my collection like:
{
"_id": {
"$oid": "5921bf9aff2e7524b4552480"
},
"title": "Prueba 1",
"text": "Ezequiel prueba texto"
}
how can I get the title and the text?
Thanks
collection.find() returns a cursor, which you need to read from in order to get the actual documents from the database.
For that, you can use cursor.toArray():
db.collection('posts').find({
_id : ObjectId(req.params.id)
}).toArray(function(err, results) {
...
});
However, since you're searching on a unique property (_id), you could also use collection.findOne():
db.collection('posts').findOne({ _id : ObjectId(req.params.id) }, function(err, post) {
...
});
I suggest you to use Mongoose, it allows you to do a lot of useful things, and is very easy to use. In your case, you can define a schema for the db and then apply all the mongoDB query on it.
Your record.js looks like that:
var mongoose = require('mongoose');

// Shape of a stored blog post: a record must always carry a title and a
// body text.
var recordFields = {
  title: { type: String, required: true },
  text: { type: String, required: true }
};
var Record = mongoose.Schema(recordFields);

module.exports = mongoose.model('Record', Record);
Then in a new file, you can import the model and make all the query that you need:
var mongoose = require('mongoose');
var Record = require('./record');

mongoose.connect('mongodb://localhost/YOURDB');

// GET /post/:id/edit — fetch the post by id and render the edit form.
app.get('/post/:id/edit', function (req, res) {
  console.log(req.params.id);
  Record.findById(req.params.id, function (err, record) {
    if (err) throw err;
    // BUG FIX: the callback parameter is `record`, not `result` —
    // the original `{post: result}` threw a ReferenceError at render time.
    return res.render('edit.ejs', { post: record });
  });
});

Updating Mongo documents through node but no results

I've been trying to add emails (which is a new field) to my "people" collection but I don't know why I'm getting no results from the server. This is my code
// Walk the email map and stamp each matching person document with its email.
// Keys appear to be email strings and values document-id strings — TODO
// confirm against the code that builds D_emails.
// NOTE(review): `key` is never declared, so this creates an implicit global.
for (key in D_emails) {
console.log(D_emails[key])
try {
// Convert the stored id string into a BSON ObjectID for the _id match.
o_id = new mongo.ObjectID(D_emails[key])
collection.updateOne({
_id: o_id
}, {
$set: {
"Email": key
}
}, function(err, results) {
if (err) {
throw err
} else if (results.length) {
// NOTE(review): updateOne's callback result is a CommandResult, not an
// array — it has no `length`, so this branch never fires and "no results"
// is always printed; check results.matchedCount / modifiedCount instead.
console.log(results)
} else {
console.log("no results")
}
});
} catch (e) {
// NOTE(review): print() is a mongo-shell builtin, not a Node global —
// this would itself throw; use console.error(e) in Node.
print(e);
}
}
According to mongo documentation for updateOne the response of the query (results in your case) does not contain length element and it is not an array or an object. This will be the response according to the documentation:
Returns a document containing:
A boolean acknowledged as true if the operation ran with write concern or false if write concern was disabled
matchedCount containing the number of matched documents
modifiedCount the number of modified documents upsertedId containing the _id for
the upserted document
first of all. try to code without try catch blocks when using the concept of "err" object being returned from every asynchronous callback. You can treat the error right inside the callback. "throwing" the error will also prevent other emails from being updated. But maybe that's what you want. It doesn't really matter now.
Now, back to your question. let me show you what result is, in this example:
// Demonstration: insert a document, update it, then read it back, logging
// the raw updateOne result along the way. Relies on `collection`, `assert`
// and `db` being set up by the surrounding connection code (not shown).
// inserting.
collection.insertOne({_id: "lala", a:10, b:20}, function (err, r) {
assert.equal(null, err);
assert.equal(1, r.insertedCount);
// updating.
collection.updateOne({_id: "lala"}, {$set: {a: 99}}, {}, function(err, result) {
assert.equal(null, err);
console.log(result) // first thing printed.
console.log(JSON.stringify(result, null, "\t")) // second print.
// checking whats inside it.
collection.findOne({_id: "lala"}, {}, function(err, doc) {
assert.equal(null, err);
console.log(doc) // third print.
db.close() // don't close connection if you don't need to.
})
})
})
the 3 printed things will be (scroll to the end to see what you actually want):
CommandResult {
result: { ok: 1, nModified: 1, n: 1 },
connection:
Connection {
domain: null,
_events:
{ close: [Object],
error: [Object],
timeout: [Object],
parseError: [Object],
connect: [Function] },
_eventsCount: 5,
_maxListeners: undefined,
options:
{ socketOptions: {},
auto_reconnect: true,
host: 'localhost',
port: 27017,
cursorFactory: [Object],
reconnect: true,
emitError: true,
size: 5,
disconnectHandler: [Object],
bson: BSON {},
messageHandler: [Function],
wireProtocolHandler: [Object] },
id: 1,
logger: Logger { className: 'Connection' },
bson: BSON {},
tag: undefined,
messageHandler: [Function],
maxBsonMessageSize: 67108864,
port: 27017,
host: 'localhost',
keepAlive: true,
keepAliveInitialDelay: 0,
noDelay: true,
connectionTimeout: 0,
socketTimeout: 0,
destroyed: false,
domainSocket: false,
singleBufferSerializtion: true,
serializationFunction: 'toBinUnified',
ca: null,
cert: null,
key: null,
passphrase: null,
ssl: false,
rejectUnauthorized: false,
checkServerIdentity: true,
responseOptions: { promoteLongs: true },
flushing: false,
queue: [],
connection:
Socket {
connecting: false,
_hadError: false,
_handle: [Object],
_parent: null,
_host: 'localhost',
_readableState: [Object],
readable: true,
domain: null,
_events: [Object],
_eventsCount: 8,
_maxListeners: undefined,
_writableState: [Object],
writable: true,
allowHalfOpen: false,
destroyed: false,
_bytesDispatched: 334,
_sockname: null,
_pendingData: null,
_pendingEncoding: '',
server: null,
_server: null,
_idleNext: null,
_idlePrev: null,
_idleTimeout: -1,
read: [Function],
_consuming: true },
writeStream: null,
hashedName: '29bafad3b32b11dc7ce934204952515ea5984b3c',
buffer: null,
sizeOfMessage: 0,
bytesRead: 0,
stubBuffer: null },
matchedCount: 1,
modifiedCount: 1,
upsertedId: null,
upsertedCount: 0 }
{
"ok": 1,
"nModified": 1,
"n": 1
}
{ _id: 'lala', a: 99, b: 20 }
the first object is the result returned from "updateOne()".
the second object is the result's "toString()" implementation and this is what's inside result.result (scroll back to top).
the last object is what I get after querying for the updated document.
you can find more examples on the native mongodb driver for nodejs website: http://mongodb.github.io/node-mongodb-native/2.2/api/Collection.html#updateOne
There are plenty of examples there.

Resources