Document must be a valid JavaScript object in Node.js

I'm simply trying to set a new field to the value of an existing field in the same document, without iterating (I can iterate, but the aggregation pipeline seems more efficient).
The aggregation expression comes from the MongoDB aggregation builder, and it previews fine there.
Code
const { MongoClient } = require('mongodb');

const agg = [
  {
    '$match': {}
  }, {
    '$set': {
      'old_price': '$price'
    }
  }, {}, {}
];
const url = '...y';
const client = new MongoClient(url);

async function main() {
  const client = await MongoClient.connect(url, { useNewUrlParser: true, useUnifiedTopology: true });
  const dbName = 'mongodbVSCodePlaygroundDB';
  const collname = 'test';
  const coll = client.db(dbName).collection(collname);
  const rset = await coll.updateMany(agg);
  console.log(rset);
  await client.close();
}

main()
  .then(console.log('ok'))
  .catch(console.error)
  .finally(() => client.close());
ERROR: MongoInvalidArgumentError: Document must be a valid JavaScript object
Occurs at this line:
const rset = await coll.updateMany(agg);
Any pointers on getting this to work?
I've tried a number of variations with quotes and square brackets.
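For reference, the driver's updateMany(filter, update) takes two separate arguments; an aggregation pipeline goes in the update slot (this requires MongoDB 4.2+). A minimal sketch of what the call could look like, assuming the same coll as above and with the empty placeholder stages dropped:

// updateMany takes (filter, update); the pipeline is the second argument,
// not the whole argument list. Empty pipeline stages ({}) are not valid.
const rset = await coll.updateMany(
  {},                                     // filter: match every document
  [{ '$set': { 'old_price': '$price' } }] // update: aggregation pipeline
);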

Related

Parameter obj to Document() must be an object when trying to convert array to mongoose document with redis

I am using Redis to cache my queries. It's working fine with objects, but not when I get an array back. It gives me the error "Parameter "obj" to Document() must be an object, got kids". It also happens with count queries. Here is my code:
const mongoose = require("mongoose");
const redis = require("redis");
const util = require("util");

const client = redis.createClient(process.env.REDIS_URL);
client.hget = util.promisify(client.hget);
const exec = mongoose.Query.prototype.exec;

mongoose.Query.prototype.cache = async function (options = {}) {
  this.useCache = true;
  this.hashKey = JSON.stringify(options.key || "");
  this.time = JSON.stringify(options.time || 36000);
  return this;
};

mongoose.Query.prototype.exec = async function () {
  if (!this.useCache) {
    return exec.apply(this, arguments);
  }
  const key = JSON.stringify(
    Object.assign({}, this.getQuery(), {
      collection: this.mongooseCollection.name,
    })
  );
  // client.flushdb(function (err, succeeded) {
  //   console.log(succeeded); // will be true if successful
  // });
  const cacheValue = await client.hget(this.hashKey, key);
  if (cacheValue) {
    const doc = JSON.parse(cacheValue);
    /*
      this.model refers to the class of the corresponding Mongoose model of
      the query being executed (e.g. User, Blog). This function must return a
      Promise of Mongoose model objects, because a Mongoose model object has
      other functions attached once it is created (validate, set, get, etc.).
    */
    console.log("Response from Redis");
    console.log(doc);
    console.log(Array.isArray(doc));
    return Array.isArray(doc)
      ? doc.map((d) => new this.model(d))
      : new this.model(doc);
  }
  // Await the results of the query once executed, with any arguments passed on.
  const result = await exec.apply(this, arguments);
  client.hset(this.hashKey, key, JSON.stringify(result));
  client.expire(this.hashKey, this.time);
  console.log("Response from MongoDB");
  return result;
};

module.exports = {
  clearHash(hashKey) {
    client.del(JSON.stringify(hashKey));
  },
};
Data in Redis: [ 'kids', 'men', 'women' ]
Query:
const collectionType = await Product.find()
  .distinct("collectionType")
  .cache({ key: "COLLECTION_TYPE" });
Can anyone please tell me what I am doing wrong?
I have solved it by returning the doc directly, and it's working fine. I'm not sure if that is the right way, though; if I return the doc directly, the data is served from Redis only.
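Since distinct() resolves to an array of plain values (strings here), they cannot be hydrated into Mongoose documents. A hedged sketch of the cache-hit branch that only wraps values that are actually objects (my suggestion, not code from the thread):

if (cacheValue) {
  const doc = JSON.parse(cacheValue);
  // Primitives such as the strings returned by distinct() pass through
  // unchanged; only plain objects are hydrated into model instances.
  const hydrate = (d) =>
    d !== null && typeof d === "object" ? new this.model(d) : d;
  return Array.isArray(doc) ? doc.map(hydrate) : hydrate(doc);
}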

How to use MongoDB change streams in Node.js to populate a new collection

I want to use MongoDB change streams to watch insertions/updates on a first collection and, when a condition is met, populate another collection with computed values extracted from the watched collection.
Following the MongoDB tutorial, I came to the following result:
require('dotenv').config();
const { MongoClient } = require('mongodb');
const stream = require('stream');
const es = require('event-stream');

async function monitorListingsUsingStreamAPI(client, pipeline = []) {
  const collection = client
    .db(process.env.MONGO_DB)
    .collection(process.env.COLLECTION_TO_MONITOR);
  const changeStream = collection.watch(pipeline);
  const collection_dest = client
    .db(process.env.MONGO_DB)
    .collection(process.env.COLLECTION_TO_POPULATE);
  changeStream.pipe(
    es.map(function (doc, next) {
      const { _id, ...data } = doc.fullDocument;
      const new_doc = { size: data.samples.length, data };
      (async () => {
        await collection_dest.insertOne(new_doc, next);
      })();
    }),
  );
}
async function main() {
  const uri = process.env.MONGO_DB_URI;
  const client = new MongoClient(uri, {
    useUnifiedTopology: true,
    useNewUrlParser: true,
  });
  try {
    // Connect to the MongoDB cluster
    await client.connect();
    const pipeline = [
      {
        $match: {
          operationType: 'insert',
          'fullDocument.samples': { $size: 3 },
        },
      },
    ];
    // Monitor new listings using the Stream API
    await monitorListingsUsingStreamAPI(client, pipeline);
  } catch (err) {
    // A try block needs a catch or finally clause; the client is left open
    // so the change stream keeps running.
    console.error(err);
  }
}
It actually seems to work, but I used event-stream to pipe the MongoDB change stream into another stream, where I used an immediately-invoked anonymous async function to populate the second collection.
I wonder if this approach is correct. How would I use transform streams instead?
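One way to answer the transform-stream part: replace event-stream with Node's built-in stream.Transform and stream.Writable in object mode. A sketch under the same assumptions as the code above (same changeStream and collection_dest; not the original author's code):

const { Transform, Writable } = require('stream');

// Turn each change event into the computed document.
const toSummary = new Transform({
  objectMode: true,
  transform(change, _enc, callback) {
    const { _id, ...data } = change.fullDocument;
    callback(null, { size: data.samples.length, data });
  },
});

// Insert each computed document into the destination collection,
// using the callback for backpressure instead of an IIFE.
const toCollection = new Writable({
  objectMode: true,
  write(doc, _enc, callback) {
    collection_dest.insertOne(doc).then(() => callback(), callback);
  },
});

changeStream.pipe(toSummary).pipe(toCollection);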

Best practice running queries in Node.js with MongoDB driver 3.6?

The official documentation of the Node.js Driver version 3.6 contains the following example for the .find() method:
const { MongoClient } = require("mongodb");

// Replace the uri string with your MongoDB deployment's connection string.
const uri = "mongodb+srv://<user>:<password>@<cluster-url>?w=majority";
const client = new MongoClient(uri);

async function run() {
  try {
    await client.connect();
    const database = client.db("sample_mflix");
    const collection = database.collection("movies");
    // Query for movies that have a runtime less than 15 minutes
    const query = { runtime: { $lt: 15 } };
    const options = {
      // Sort returned documents in ascending order by title (A->Z)
      sort: { title: 1 },
      // Include only the `title` and `imdb` fields in each returned document
      projection: { _id: 0, title: 1, imdb: 1 },
    };
    const cursor = collection.find(query, options);
    // Print a message if no documents were found
    if ((await cursor.count()) === 0) {
      console.log("No documents found!");
    }
    await cursor.forEach(console.dir);
  } finally {
    await client.close();
  }
}
run().catch(console.dir);
To me this somewhat implies that I would have to create a new connection for each DB request I make.
Is this correct? If not, what is the best practice to keep the connection alive for various routes?
You can use Mongoose to set up a connection with your database:
mongoose.connect('mongodb://localhost:27017/myapp', { useNewUrlParser: true });
Then you need to define the models you will use to communicate with your DB in your routes:
const MyModel = mongoose.model('Test', new Schema({ name: String }));
MyModel.findOne(function (error, result) { /* ... */ });
https://mongoosejs.com/docs/connections.html
It's 2022 and I stumbled upon your post because I've been running into the same issue. All the tutorials and guides I've found so far have setups that require reconnecting in order to do anything with the database.
I found one solution from someone on GitHub that creates a class to create, save, and check whether a client connection exists, so it only recreates a client connection if it doesn't already exist.
const MongoClient = require('mongodb').MongoClient;

class MDB {
  static async getClient() {
    if (this.client) {
      return this.client;
    }
    this.client = await MongoClient.connect(this.url);
    return this.client;
  }
}
MDB.url = '<your_connection_url>';

app.get('/yourroute', async (req, res) => {
  try {
    const client = await MDB.getClient();
    const db = client.db('your_db');
    const collection = db.collection('your_collection');
    const results = await collection.find({}).toArray();
    res.json(results);
  } catch (error) {
    console.log('error:', error);
  }
});
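A commonly recommended alternative is to connect once at startup and reuse the client, since the driver maintains an internal connection pool. A sketch assuming an Express app (placeholder names such as your_db are carried over from the snippet above):

const express = require('express');
const { MongoClient } = require('mongodb');

const app = express();
const client = new MongoClient('<your_connection_url>');

async function start() {
  await client.connect();               // one shared connection pool
  app.locals.db = client.db('your_db'); // reuse this handle in every route
  app.listen(3000);
}

app.get('/yourroute', async (req, res) => {
  const results = await req.app.locals.db
    .collection('your_collection')
    .find({})
    .toArray();
  res.json(results);
});

start().catch(console.error);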

NodeJS MongoDB update fails with ObjectID ref from another entity

In my NodeJS app I have this update:
// Here I am passing the IDs as strings, but even as new ObjectID(docId) it does not work.
let filter = { 'TepDocSignId': docId, 'ClientId': clientId };
let update = { 'Solved': true, 'SolvedOn': new Date() };
const resp = await db.collection(process.env.MONGODB_WARNING_COLLECTION_NAME)
  .updateOne({ filter }, { '$set': update });
In Node it DOES NOT work; matchedCount and modifiedCount are always 0.
But if I run the same update in RoboMongo, it works fine!
What is going on?
Is there some kind of profiler where I can see what is being executed in the Node environment?
I am using:
"aws-sdk": "^2.590.0",
"mongodb": "^3.5.5",
And the data are:
process.env.MONGODB_WARNING_COLLECTION_NAME = 'Warning'
docId = '5e29197dac26760002f5a7b5'
clientId = '5caf91cd800fc20002cad0fb'
The full code is below (here I am using the IDs as ObjectID, but it does not matter whether I parse them or pass them as strings; nothing matches):
const MongoClient = require('mongodb').MongoClient;
const ObjectID = require('mongodb').ObjectID;

let dbConnString = process.env.MONGODB_CONNECTION_STRING;
let dbName = process.env.MONGODB_DATABASE_NAME;
let db;

const client = new MongoClient(dbConnString, {
  useNewUrlParser: true,
  useUnifiedTopology: true
});

const createConn = async () => {
  await client.connect();
  db = client.db(dbName);
};

async function partnerWarningOff(docId, partnerId) {
  if (!client.isConnected()) {
    try {
      await createConn();
    } catch (e) {
      throw new Error(`partnerWarningOff - OpeningConn Error: ${e}`);
    }
  }
  console.log('process.env.MONGODB_WARNING_COLLECTION_NAME', process.env.MONGODB_WARNING_COLLECTION_NAME);
  console.log('docId', docId);
  console.log('partnerId', partnerId);
  let dId = new ObjectID(docId);
  let pId = new ObjectID(partnerId);
  let filter = { 'TepDocSignId': dId, 'PartnerId': pId };
  let update = { 'Solved': true, 'SolvedOn': new Date() };
  const resp = await db.collection(process.env.MONGODB_WARNING_COLLECTION_NAME)
    .updateOne({ filter }, { '$set': update });
  console.log('resp', resp);
  if (!resp) {
    throw new Error(`partnerWarningOff Error solving tep warning with DocId ${docId}`);
  }
}
cheers
Found the problem.
The handler was passing docId as an ObjectID and clientId as a string.
The only thing I had to do was change the way the handler called the function; the correct way is
myfuncName(doc._id.toString(), doc.clientId)
This way everything is a string, the same way it is stored in the other entity.
You can follow this code:
const response = await db
  .collection(process.env.MONGODB_WARNING_COLLECTION_NAME)
  .findOneAndUpdate(
    { TepDocSignId: dId, PartnerId: pId },
    { $set: req.body },
    // Note: `new: true` is a Mongoose option; the native driver's
    // equivalent is returnOriginal: false (returnDocument: 'after'
    // in newer driver versions).
    { returnOriginal: false }
  );
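One more thing worth flagging (my observation, not from the thread): { filter } is ES6 shorthand for { filter: filter }, so the query actually sent to MongoDB is nested under a "filter" key and matches nothing. Passing the object directly avoids that:

// Pass the filter itself, not an object wrapping it.
const resp = await db
  .collection(process.env.MONGODB_WARNING_COLLECTION_NAME)
  .updateOne(filter, { '$set': update });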

Mongoose, how to empty a collection

I have the following hapi.js server:
const Hapi = require('hapi');
const Mongoose = require('mongoose');
const Wreck = require('wreck');

const server = new Hapi.Server({
  "host": "localhost",
  "port": 3000
});

Mongoose.connect('mongodb://localhost/myDB', { useNewUrlParser: true });

const BlockModel = Mongoose.model('block', {
  height: Number,
  size: Number,
  time: Number
});

server.route({
  method: "GET",
  path: "/",
  handler: async (request, h) => {
    Mongoose.model.blocks.remove({}); // <------ This is the part of the code I intend to use to delete the collection
    const { res, payload } = await Wreck.get('https://api.url');
    let myJson = JSON.parse(payload.toString()).blocks;
    console.log(myJson);
    for (let i = 0; i < myJson.length; i++) {
      var block = new BlockModel({ height: myJson[i].height, size: myJson[i].size, time: myJson[i].time });
      block.save();
    }
    console.log(myJson);
    return "test";
  }
});

server.start();
The point is, it works fine and saves the desired data to my collection, but of course the database will keep growing if I don't delete the data on each execution. So I intend to implement something similar to
db.blocks.remove({}) // where blocks is my collection
which works fine in the mongo console,
but I can't find out how to implement this in the code.
You can use the deleteMany operator with an empty filter.
db.collection.deleteMany({})
or with your model:
await BlockModel.deleteMany({})
Empty all collections in a db:
const collections = await mongoose.connection.db.collections();
for (let collection of collections) {
  await collection.deleteMany({});
}
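Put together, the handler from the question could clear and repopulate the collection like this (a sketch reusing the question's BlockModel and API URL, not the original code):

handler: async (request, h) => {
  // Empty the collection before re-populating it.
  await BlockModel.deleteMany({});
  const { payload } = await Wreck.get('https://api.url');
  const blocks = JSON.parse(payload.toString()).blocks;
  // insertMany is one round trip instead of one save() per document.
  await BlockModel.insertMany(
    blocks.map(({ height, size, time }) => ({ height, size, time }))
  );
  return "test";
}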
