ClientSession cannot be serialized to BSON - Mongodb Transaction - node.js

I'm fairly new to mongodb transaction so I have no idea what I did wrong. I've followed the direction in the documentation but I still get this error
{
"status": 400,
"name": "Error",
"message": "ClientSession cannot be serialized to BSON."
}
Here's my code
// Creates an insurance record with a sequential `id` inside a MongoDB transaction.
// NOTE(review): this is the failing version from the question — bugs flagged below.
async createInsurance(params) {
const session = await db99.startSession();
try {
session.startTransaction();
const doc = await db.insurance
// BUG: the session is passed as the query *filter*, so the driver tries to
// BSON-serialize the ClientSession — this is exactly what raises
// "ClientSession cannot be serialized to BSON". The session belongs in the
// options argument, e.g. .find({}, null, { session }).
.find({ session })
.sort({ createdAt: -1 })
.limit(1)
.lean();
const latest = doc[0];
// Next sequential id; falls back to 1 when the collection is empty.
const payload = { ...params, id: latest ? latest.id + 1 : 1 };
// BUG: Model.create() only honours an options object when the docs are
// wrapped in an array: create([payload], { session }).
const data = await db.insurance.create(payload, { session });
await mongoTransaction.commitWithRetry(session);
const result = defaultResult('SuccessCreateInsurance', SuccessCreateInsurance[this.lang], data, 200);
return result;
} catch (error) {
// NOTE(review): the transaction is never explicitly aborted on error here.
logger.log('error', 'ProductService-createInsurance', { error });
throw error;
} finally {
// Always release the session, success or failure.
await session.endSession();
}
}
Is there anything I missed?

The documentation for Model.create indicates that the documents must be passed in an array in order to specify options:
[options] «Object» Options passed down to save(). To specify options, docs must be an array, not a spread.
Change
const payload = { ...params, id: latest ? latest.id + 1 : 1 };
to
const payload = [{ ...params, id: latest ? latest.id + 1 : 1 }];

Turns out I have to pass the session as the 3rd parameter of the findOne method, and wrap the payload in an array in the create method, because create can only take options when the docs are given as an array.
the code will look like this
async createInsurance(params) {
const session = await db99.startSession();
session.startTransaction();
try {
const doc = await db.insurance
.findOne(null, null, { session })
.sort({ createdAt: -1 })
.limit(1)
.lean();
const latest = doc[0];
const payload = { ...params, id: latest ? latest.id + 1 : 1 };
const data = await db.insurance.create([payload], { session });
await session.commitTransaction();
const result = defaultResult('SuccessCreateInsurance', SuccessCreateInsurance[this.lang], data, 200);
return result;
} catch (error) {
await session.abortTransaction();
logger.log('error', 'ProductService-createInsurance', { error });
throw error;
} finally {
await session.endSession();
}
}

Related

Looping array from csv and update it to mongodb nodejs

So I try to read a CSV file into a JSON array using Node.js, and then update MongoDB from it. Is there a way to loop over the data and update the database based on the JSON using this code? If that's not possible, is there a way to do it asynchronously with Node? I keep running into promise issues.
here is my code:
import csvtojson from "csvtojson";
import { MongoClient } from "mongodb";
const csvFilePath = "./data.csv";
console.log(" ");
const uri = "mongodb://localhost:27017";
// Print the name of every database visible to the connected client.
async function listDatabases(client) {
  const { databases } = await client.db().admin().listDatabases();
  console.log("Databases:");
  for (const { name } of databases) {
    console.log(` - ${name}`);
  }
}
// Connect to MongoDB, print the database list, and always release the client.
async function main() {
  const client = new MongoClient(uri);
  try {
    await client.connect();
    await listDatabases(client);
  } catch (err) {
    console.error(err);
  } finally {
    // Close the connection whether or not listing succeeded.
    await client.close();
  }
}
main().catch(console.error);
// Reads the CSV into an array of row objects and tries to write each row to
// MongoDB. NOTE(review): this is the broken version the question asks about.
async function updateData() {
const client = new MongoClient(uri);
const data = await csvtojson().fromFile(csvFilePath);
console.log(data.length);
// Side-effect-only loop: forEach would express intent better than map.
data.map((datas) => {
// console.log(datas);
console.log(Object.keys(datas), Object.values(datas));
});
try {
await client.connect();
const db = client.db("nabatiform");
const stuff = db.collection("users");
data.map((datas) => {
// console.log(datas);
console.log(Object.keys(datas), Object.values(datas));
// BUG: `await` inside a non-async callback is a SyntaxError — the callback
// would have to be async, or the loop rewritten as for...of.
// findOneAndUpdate also requires both a filter AND an update document.
const result = await stuff.findOneAndUpdate({})
});
// BUG: `result` is scoped to the callback above, so it is undefined here.
console.log(result);
} catch (e) {
console.error(e);
} finally {
await client.close();
}
}
updateData().catch(console.error);
here is my JSON that i read from CSV:
[{
NIK: '22000028',
Plant: 'Majalasgka',
fullname: 'FERI FsaYAH',
Group_Shift: 'R1',
Gedung_Zona: 'Gas A',
Sector: 'SEKTOas 08',
SPV: 'TasI SUasWATI'
},
{
NIK: '22000330',
Plant: 'Majaaka',
fullname: 'AYasdMAYANTI',
Group_Shift: 'NSHT',
Gedung_Zona: 'GEDU',
Sector: 'SE-08',
SPV: 'TI'
},
]
here is what its look like on my document on mongodb:
{
"_id": {
"$oid": "6369b17b11e02557349d8de5"
},
"fullname": "EGA PERMANA SAPUTRA",
"password": "$2b$10$TuKKwzIxmqvnJfR8LRV/zu1s.Gqpt4yANLAcNNFQ6pqTuLL82.00q",
"NIK": "17000691",
"status": "active",
"department": "Prodaucasdfation",
"position": "Foreasdman",
"Group_Shift": "R1",
"role": "user",
"__v": 0,
"createdAt": 1667871099,
"updatedAt": 1669025651,
"userInformation": {},
"plant": "Majasangka"
}
Use a forEach() loop to push each findOneAndUpdate() function to an array. Then execute all the promises asynchronously using Promise.all(). This is much faster than using await inside a map() or for loop.
/**
 * Reads the CSV file and updates the matching user document for every row,
 * running all updates in parallel with Promise.all().
 */
async function updateData() {
  const client = new MongoClient(uri);
  // Parse the CSV into an array of row objects.
  const data = await csvtojson().fromFile(csvFilePath);
  console.log(data.length);
  try {
    await client.connect();
    const db = client.db("nabatiform");
    const stuff = db.collection("users");
    // Build one update promise per row, then execute them all in parallel.
    const promises = data.map((row) => {
      console.log(Object.keys(row), Object.values(row));
      // Match the user by NIK and $set the remaining CSV columns.
      // (findOneAndUpdate requires both a filter and an update document —
      // the original `findOneAndUpdate({})` would be rejected.)
      const { NIK, ...fields } = row;
      return stuff.findOneAndUpdate({ NIK }, { $set: fields });
    });
    // Capture the outcomes; the original logged an undefined `result`.
    const results = await Promise.all(promises);
    console.log(results.length);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}
use await while calling updateData()
await updateData().catch(console.error);
I would suggest a for loop instead of map, because async/await just won't work inside a map callback, while a for loop does not have that problem.
I'm no javascript wizard, but hopefully this is helpful.
Perhaps a little modification of your updateData function will make the updates. I don't really have a way to test it.
/**
 * Reads the CSV file and, for each row, updates the user document whose NIK
 * matches, setting every other CSV column on it.
 */
async function updateData() {
  const client = new MongoClient(uri);
  const data = await csvtojson().fromFile(csvFilePath);
  console.log(data.length);
  try {
    await client.connect();
    const db = client.db("nabatiform");
    const stuff = db.collection("users");
    // `await` is illegal inside a plain forEach callback (the original was a
    // SyntaxError); a for...of loop awaits each update sequentially.
    for (const element of data) {
      // Filter on the NIK key only.
      const filter = Object.fromEntries(
        Object.entries(element).filter(([key]) => key === "NIK")
      );
      // $set every other CSV column.
      const updateFields = Object.fromEntries(
        Object.entries(element).filter(([key]) => key !== "NIK")
      );
      const result = await stuff.findOneAndUpdate(filter, { $set: updateFields });
      // Log inside the loop — `result` is block-scoped to each iteration
      // (the original logged it outside its scope).
      console.log(result);
    }
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}

mongodb find all slow

I'm new to Node.js and MongoDB. I have a simple request which returns 800 entities without any where statements — just a find-all (or explain()).
The response is very slow.
Is there a better way than collection.find().lean()?
const Jobs = require("../model/Jobs.mongo");
// Persist a job posting unless one with the same idJob already exists.
const saveData = async (title, link, location, idJob, companyName) => {
  const existing = await Jobs.findOne({ idJob });
  if (existing) {
    // Duplicate: leave the stored document untouched.
    console.log(`${title} ***** is already in the data with id ***** ${idJob}`);
    return;
  }
  try {
    const job = new Jobs({ title, link, location, idJob, companyName });
    await job.save();
    console.log(job);
  } catch (e) {
    console.log(e);
  }
};
// GET handler: return every job, newest first, as plain objects plus a count.
const getAllJobs = async (req, res) => {
  const query = Jobs.find({}).sort({ createdAt: "desc" });
  const jobs = await query.lean();
  const count = jobs.length;
  res.status(200).json({ jobs, count });
};
// GET handler: return all jobs for the company named in the route params.
const getJobByCompany = async (req, res) => {
  const { companyName } = req.params;
  const job = await Jobs.find({ companyName });
  // find() resolves to an array — it is never falsy, so the original
  // `if (!job)` could never fire. Check for emptiness instead, and `return`
  // so we never attempt to send a second response after the 404.
  if (job.length === 0) {
    return res.status(404).json({});
  }
  res.status(200).json({ job, count: job.length });
};
module.exports = {
saveData,
getAllJobs,
getJobByCompany,
};
If you have been facing this issue for a while, try checking your internet connection.
You can also take a look at how much data you are trying to receive.
It could simply be a drop in internet speed — let me know what the result is :)

how to unset (delete) field in findByIdAndUpdate in mongoDB

I have an API that update a post and I want when a field is passed as undefined to delete this field or at least set it to undefined.
Here is my current implementation but it's not working:
const finalData = {
_id,
...,
mileage,
};
const result = await provider.updatePost(finalData);
and my provider
/**
 * Updates a post by id. Fields passed as `undefined` are *removed* from the
 * document via $unset (the original relied on `omitUndefined`, which only
 * skips undefined fields — it can never delete ones already stored).
 *
 * @param {Object} payload - `_id` of the post plus the fields to set/unset.
 * @returns {Object} The updated document.
 * @throws {NotFound} When no post matches `_id`.
 */
updatePost: async (payload) => {
  const { _id, ...newData } = payload;
  // Split defined fields ($set) from undefined ones ($unset).
  const toSet = {};
  const toUnset = {};
  for (const [key, value] of Object.entries(newData)) {
    if (value === undefined) {
      toUnset[key] = "";
    } else {
      toSet[key] = value;
    }
  }
  // Only include non-empty operators; MongoDB rejects empty $set/$unset.
  const update = {};
  if (Object.keys(toSet).length > 0) update.$set = toSet;
  if (Object.keys(toUnset).length > 0) update.$unset = toUnset;
  const result = await Model.findByIdAndUpdate(_id, update, {
    new: true,
  });
  if (!result) {
    throw new NotFound('No post found');
  }
  // Index only after confirming the post exists (the original indexed a
  // possibly-null result before the check).
  await indexPost(result);
  return result;
}

Nodejs exports returns undefined on mongoose Insertion

I have created a Node.js application organised as modules. The problem I am facing is that a MongoDB insertion returns an undefined value from one of my controllers. The issue I found is that my async function doesn't wait for the MongoDB operation to complete, but I could not find a solution for that. My route and controller code is given below.
route.js
const {
createEvent, editEvent
} = require('./controller');
// POST /event/create — validates the request, then delegates to createEvent.
router.post("/event/create", validateEventManage, isRequestValidated, async(req, res) => {
let data = {};
data.body = req.body;
try{
// `event` is undefined because createEvent (controller.js) never returns the
// saved document — its `return` statements sit inside the save() callback.
let event = await createEvent(req.body);
console.log(event) // returned undefined
data.event = event;
res.status(200).json(data);
}catch(error){
console.log(error)
// NOTE(review): the error path also responds 200; a 4xx/5xx status would be
// more appropriate here.
res.status(200).json({error:error});
}
});
controller.js
// Creates an Event document from the request body.
// BUG: this async function always resolves to undefined — see notes below.
exports.createEvent = async(data) => {
// return "test" // This works correctly
const eventObj = {
name : data.name,
description : data.desc,
type : data.type,
startDate : new Date()
}
// BUG: `new Event(...)` is synchronous; awaiting the constructor does nothing.
const event = await new Event(eventObj);
// BUG: the `return` statements below return from the save *callback*, not
// from createEvent, so the function itself never returns the event. Mixing
// await with a save callback is also redundant — drop the callback.
await event.save((error,event)=>{
if(error) {
return error;
}
if(event){
return event;
}
});
}
You should not await the new Event constructor.
Also, since you are using async - await you can
remove the callback from the save and try ... catch the error to handle it:
exports.createEvent = async (data) => {
// return "test" // This works correctly
const eventObj = {
name: data.name,
description: data.desc,
type: data.type,
startDate: new Date(),
};
try {
const event = new Event(eventObj);
await event.save();
return event;
} catch (error) {
return error;
}
};

Storing ArrayBuffer in mongodb with mongoose

I am trying to store the book's array buffer in MongoDB with the help of mongoose. I am getting some error related to validation I hope you guys can help me.
Here is my back-end:
This is my schema
...
// User schema: `book` is stored as BSON binary (a Node Buffer).
// NOTE(review): a Buffer path casts Node Buffers / byte arrays — not a browser
// ArrayBuffer. An ArrayBuffer JSON-stringifies to "{}", which is presumably
// why the error reports 'Cast to Buffer failed for value "{}"'. Verify what
// the client actually sends before changing the schema type.
const UserSchema = new mongoose.Schema({
book: {
type: Buffer,
},
});
...
router:
...
// POST /api/book — stores the uploaded book on the authenticated user.
router.post("/api/book", auth, async (req, res) => {
try {
const { user } = req;
// NOTE(review): req.body.book arrives as {} — an ArrayBuffer serialized via
// JSON becomes an empty object — which is what triggers
// 'Cast to Buffer failed for value "{}"'. The client should send a byte
// array or base64 string instead (TODO confirm against the front-end).
user.book = req.body.book;
await user.save();
res.send({ success: true });
} catch (error) {
console.log(error.name, error.message);
// NOTE(review): responds 200 even on failure; only the `success` flag and
// `error` payload signal the problem.
res.send({ success: false, error });
}
});
...
Here is my front-end:
This is the place where I did a post request
...
getBook = async (book) => {
console.log(book);
if (book) {
try {
const res = await axios.post(
"/api/book",
{ book },
{
headers: {
Authorization:
"eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjVlYTJlOWJlZmI0OTVhMDZhZTkzZjZmNyIsImlhdCI6MTU4NzczNDk3NH0.fzoJcHbkVhkgYZZgYP0N1XNdqf6uowAVrekntuulOu0",
},
}
);
} catch (error) {
console.log(error.name, error.message);
}
}
...
This is the place where I get the book, convert it into an ArrayBuffer, and pass it to the code above.
...
// Reads the selected .epub file and hands its contents to props.getBook.
handleOnChange = (event) => {
if (event.target.files[0].type === "application/epub+zip") {
let reader = new FileReader();
reader.readAsArrayBuffer(event.target.files[0]);
reader.onload = () => {
// NOTE(review): reader.result is an ArrayBuffer here; JSON.stringify turns
// an ArrayBuffer into "{}", so the server receives an empty object. Convert
// it first (e.g. Array.from(new Uint8Array(reader.result)) or base64)
// before posting — TODO confirm against the server's expected format.
this.props.getBook(reader.result);
};
} else {
alert("Only .epub Files are supported");
}
};
...
and here is my error at server side
ValidationError User validation failed: book: Cast to Buffer failed for value "{}" at path "book"
POST /api/book 200 198.131 ms - 533
I think it is because I am using the wrong data type in the mongoose schema

Resources