Cannot modify a WriteBatch that has been committed - node.js

I am trying to commit a batch, but when both queries return something I get the error "Cannot modify a WriteBatch that has been committed". How should I solve this? Is the right approach to initialize two arrays, push each batch.commit() into them, and then resolve the promises?
module.exports = async (change) => {
  try {
    const timerSnapshot = change.before.data().timestamp;
    console.log(timerSnapshot, "before");
    const timerTimestampMinusOne = momentTz(timerSnapshot)
      .tz("Asia/Kolkata")
      .subtract(1, "days")
      .valueOf();
    const timerMinusThreeHours = momentTz(timerSnapshot)
      .tz("Asia/Kolkata")
      .subtract(3, "hours")
      .valueOf();
    const [profileSnapshot, threeHourReminder] = await Promise.all([
      db
        .collection("Profiles")
        .where("lastQueryFrom", ">", timerTimestampMinusOne)
        .get(),
      db
        .collection("Profiles")
        .where("lastQueryFrom", ">", timerMinusThreeHours)
        .get(),
    ]);
    const batch = db.batch();
    const batchArray = [];
    console.log(profileSnapshot.docs.length, "profile");
    console.log(threeHourReminder.docs.length, "three");
    profileSnapshot.forEach((doc) => {
      batch.set(
        db.collection("Timers").doc(getISO8601Date()),
        {
          timestamp: momentTz().tz("Asia/Kolkata").valueOf(),
        },
        { merge: true }
      );
      batchArray.push(batch.commit());
    });
    threeHourReminder.forEach((doc) => {
      batch.set(
        db.collection("Timers").doc(getISO8601Date()),
        {
          timestamp: momentTz().tz("Asia/Kolkata").valueOf(),
        },
        { merge: true }
      );
      batchArray.push(batch.commit());
    });
    await Promise.all(batchArray);
  } catch (error) {
    console.error(error);
  }
};
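The error comes from reusing one WriteBatch: batch.commit() is called inside both loops, and once a batch has been committed it can neither be committed again nor have more writes staged on it. A sketch of the simplest fix (untested, reusing db, momentTz, and getISO8601Date from the question): stage all writes first and commit exactly once. If separate commits are really needed, create a fresh db.batch() inside each loop iteration and push each commit into batchArray; also note that a single batch is limited to 500 operations.
const batch = db.batch();
const stageTimerWrite = () => {
  batch.set(
    db.collection("Timers").doc(getISO8601Date()),
    { timestamp: momentTz().tz("Asia/Kolkata").valueOf() },
    { merge: true }
  );
};
profileSnapshot.forEach(stageTimerWrite);
threeHourReminder.forEach(stageTimerWrite);
await batch.commit(); // a WriteBatch may only be committed once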

Related

Looping over an array from a CSV and updating MongoDB in Node.js

I am reading a CSV file into a JSON array using Node.js and trying to update MongoDB with it. Is there a way to loop over the data and update the database based on the JSON using this code? If not, is there a way to do it asynchronously in Node? I keep running into promise issues.
Here is my code:
import csvtojson from "csvtojson";
import { MongoClient } from "mongodb";

const csvFilePath = "./data.csv";
console.log(" ");
const uri = "mongodb://localhost:27017";

async function listDatabases(client) {
  const databasesList = await client.db().admin().listDatabases();
  console.log("Databases:");
  databasesList.databases.forEach((db) => console.log(` - ${db.name}`));
}

async function main() {
  const client = new MongoClient(uri);
  try {
    await client.connect();
    await listDatabases(client);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}
main().catch(console.error);

async function updateData() {
  const client = new MongoClient(uri);
  const data = await csvtojson().fromFile(csvFilePath);
  console.log(data.length);
  data.map((datas) => {
    // console.log(datas);
    console.log(Object.keys(datas), Object.values(datas));
  });
  try {
    await client.connect();
    const db = client.db("nabatiform");
    const stuff = db.collection("users");
    data.map((datas) => {
      // console.log(datas);
      console.log(Object.keys(datas), Object.values(datas));
      const result = await stuff.findOneAndUpdate({})
    });
    console.log(result);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}
updateData().catch(console.error);
Here is the JSON that I read from the CSV:
[
  {
    NIK: '22000028',
    Plant: 'Majalasgka',
    fullname: 'FERI FsaYAH',
    Group_Shift: 'R1',
    Gedung_Zona: 'Gas A',
    Sector: 'SEKTOas 08',
    SPV: 'TasI SUasWATI'
  },
  {
    NIK: '22000330',
    Plant: 'Majaaka',
    fullname: 'AYasdMAYANTI',
    Group_Shift: 'NSHT',
    Gedung_Zona: 'GEDU',
    Sector: 'SE-08',
    SPV: 'TI'
  },
]
Here is what a document looks like in MongoDB:
{
  "_id": {
    "$oid": "6369b17b11e02557349d8de5"
  },
  "fullname": "EGA PERMANA SAPUTRA",
  "password": "$2b$10$TuKKwzIxmqvnJfR8LRV/zu1s.Gqpt4yANLAcNNFQ6pqTuLL82.00q",
  "NIK": "17000691",
  "status": "active",
  "department": "Prodaucasdfation",
  "position": "Foreasdman",
  "Group_Shift": "R1",
  "role": "user",
  "__v": 0,
  "createdAt": 1667871099,
  "updatedAt": 1669025651,
  "userInformation": {},
  "plant": "Majasangka"
}
Use a forEach() loop to push each findOneAndUpdate() promise to an array, then execute all the promises concurrently using Promise.all(). This is much faster than awaiting each call inside a map() or for loop.
async function updateData() {
  const promises = [];
  const client = new MongoClient(uri);
  const data = await csvtojson().fromFile(csvFilePath);
  console.log(data.length);
  data.map((datas) => {
    // console.log(datas);
    console.log(Object.keys(datas), Object.values(datas));
  });
  try {
    await client.connect();
    const db = client.db("nabatiform");
    const stuff = db.collection("users");
    data.forEach((datas) => {
      console.log(Object.keys(datas), Object.values(datas));
      // Push each promise to the array (fill in a real filter/update here)
      promises.push(stuff.findOneAndUpdate({}));
    });
    // Execute all promises and keep their results
    const results = await Promise.all(promises);
    console.log(results);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}
Use await when calling updateData():
await updateData().catch(console.error);
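One caveat on that last line: top-level await only works in an ES module (this file already uses import syntax, so a recent Node version accepts it). If the file were CommonJS instead, a minimal fallback sketch:
// CommonJS has no top-level await; chain on the returned promise instead.
updateData()
  .then(() => console.log("done"))
  .catch(console.error);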
I would suggest a for loop instead of map, because async/await inside a map callback just won't work, while a for loop does not have that problem.
I'm no javascript wizard, but hopefully this is helpful.
Perhaps a little modification of your updateData function will make the updates. I don't really have a way to test it.
async function updateData() {
  const client = new MongoClient(uri);
  const data = await csvtojson().fromFile(csvFilePath);
  console.log(data.length);
  data.map((datas) => {
    // console.log(datas);
    console.log(Object.keys(datas), Object.values(datas));
  });
  try {
    await client.connect();
    const db = client.db("nabatiform");
    const stuff = db.collection("users");
    // Below here is what I modified.
    // Collect the promises instead of awaiting inside the callback
    // (await inside a plain forEach/map callback is a syntax error).
    const promises = data.map((element) => {
      // Filter on the NIK field, update everything else.
      const filter = Object.fromEntries(Object.entries(element).filter((elem) =>
        elem[0] == "NIK"
      ));
      const updateFields = Object.fromEntries(Object.entries(element).filter((elem) =>
        elem[0] != "NIK"
      ));
      const update = { $set: updateFields };
      return stuff.findOneAndUpdate(filter, update);
    });
    const results = await Promise.all(promises);
    console.log(results);
  } catch (e) {
    console.error(e);
  } finally {
    await client.close();
  }
}

Problem using a Map in Firebase Functions

I am trying to get the length of a Map and I keep getting "undefined". Could someone please tell me what I am doing wrong?
This is the part of the code that gives me problems.
const GYMdetail: { [key: string]: number} = {};
GYMdetail[`${doc.data().name} (${doc.data().personalID})`] = 650;
const subtotal = 650 * GYMdetail.size;
This is the complete function code
export const addGymMonthlyExpense =
  functions.https.onRequest((request, response) => {
    const query1 = admin.firestore().collection("users");
    const query = query1.where("subscriptions.gym.active", "==", true);
    query.get()
      .then(async (allUsers) => {
        allUsers.docs.forEach(async (doc) => {
          if (doc != undefined) {
            const houseForGym = doc.data().subscriptions.gym.house;
            await admin.firestore()
              .doc(`houses/${houseForGym}/expenses/2022-04`)
              .get().then((snapshot) => {
                if (snapshot.data() == undefined) {
                  console.log(`${houseForGym}-${doc.data().name}: CREAR!!`);
                } else if (snapshot.data()!.issued == false) {
                  let detail: { [key: string]: any } = {};
                  const GYMdetail: { [key: string]: number } = {};
                  detail = snapshot.data()!.detail;
                  GYMdetail[
                    `${doc.data().name} (${doc.data().personalID})`
                  ] = 650;
                  const subtotal = 650 * GYMdetail.size;
                  detail["GYM"] = { "total": subtotal, "detail": GYMdetail };
                  snapshot.ref.set({ "detail": detail }, { merge: true });
                }
                return null;
              })
              .catch((error) => {
                console.log(
                  `${houseForGym} - ${doc.data().name}: ${error}`);
                response.status(500).send(error);
                return null;
              });
          }
        });
        response.send("i");
      })
      .catch((error) => {
        console.log(error);
        response.status(500).send(error);
      });
  });
Since you are executing an asynchronous call to the database in your code, you need to return a promise from the top-level code; otherwise Cloud Functions may kill the container when the final } executes and by that time the database load won't be done yet.
So:
export const addGymMonthlyExpense =
  functions.https.onRequest((request, response) => {
    const query1 = admin.firestore().collection("users");
    const query = query1.where("subscriptions.gym.active", "==", true);
    return query.get()
      ...
Next you'll need to ensure that all the nested get() calls also get a chance to finish before the Functions container gets terminated. For that I recommend not using await for each nested get call, but a single Promise.all for all of them:
query.get()
  .then(async (allUsers) => {
    const promises = [];
    allUsers.docs.forEach((doc) => {
      const houseForGym = doc.data().subscriptions.gym.house;
      promises.push(admin.firestore()
        .doc(`houses/${houseForGym}/expenses/2022-04`)
        .get().then((snapshot) => {
          ...
        }));
    });
    // Wait for all nested gets before sending the response, since an
    // HTTPS function may be terminated once the response is sent.
    await Promise.all(promises);
    response.send("i");
  })
  .catch((error) => {
    console.log(error);
    response.status(500).send(error);
  });
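As for the undefined value itself: GYMdetail is declared as a plain object, and plain objects have no .size property; that only exists on an actual Map (for an object, use Object.keys(obj).length). A minimal illustration with hypothetical data:
const GYMdetail: { [key: string]: number } = {};
GYMdetail["Jane Doe (12345)"] = 650;
console.log(GYMdetail.size);                // undefined: plain objects have no .size
console.log(Object.keys(GYMdetail).length); // 1

const gymMap = new Map<string, number>();
gymMap.set("Jane Doe (12345)", 650);
console.log(gymMap.size);                   // 1: Map does have .size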

Nodejs Streams - Help find my memory leak

So I have a process that selects from a table. I partition my select programmatically into 20 sub-selects, then go through each of those selects and stream its data to an indexing client (Solr). With every select, memory jumps up and holds until I get an OOM.
I logged when each query went off, which can be seen in the following charts:
These correlate with each jump in this memory graph:
14 of 20 queries ran before I OOMed.
I see this behavior with similar code that runs a delta every 15 mins. Every delta holds on to some amount of memory until it eventually crashes the server with an OOM (from which it recovers).
I have tried to track down issues with the delta in the past but gave up and just created a way to gracefully restart. What am I missing here?
Here is my entire code chain that makes this work... I know it's a lot to look through, but I figured as much detail as possible would help.
Library Stack:
"node": "~11.10.1"
"knex": "^0.20.9",
"oracledb": "^4.0.0"
"camelize2": "^1.0.0"
Knex - DB connection factory
'use strict'
const objection = require('objection')
const knex = require('knex')

module.exports = function ObjectionFactory(log) {
  class MyObjection extends objection.Model {
    constructor() {
      super()
    }
    static get tableName() {
      return ''
    }
  }
  MyObjection.pickJsonSchemaProperties = true

  log.info('Connecting to Oracle Pluggable...', {
    host: 'myHost',
    username: 'myUser',
    database: 'myDatabase'
  })

  const knexInstance = knex({
    client: 'oracledb',
    connection: 'connectionInfo',
    pool: {
      min: 0,
      max: 10
    },
    acquireConnectionTimeout: 10000
  })

  process.once('SIGINT', () => {
    log.info('Disconnecting from Oracle Pluggable.')
    knexInstance.destroy()
      .then(() => process.exit(0))
      .catch(() => process.exit(1))
  })

  // Shut down cleanly for nodemon
  process.once('SIGUSR2', () => {
    log.info('Disconnecting from Oracle Pluggable')
    knexInstance.destroy()
      .then(() => process.kill(process.pid, 'SIGUSR2'))
      .catch(() => process.kill(process.pid, 'SIGUSR2'))
  })

  const knexBoundClass = MyObjection.bindKnex(knexInstance)
  knexBoundClass.tag = 'Oracle Connection'
  return knexBoundClass
}
My Select Stream Code:
const oracledb = require('oracledb') // needed for oracledb.OBJECT below

module.exports = function oracleStream(log, MyObjection) {
  const knex = MyObjection.knex()
  const fetchArraySize = 10000
  const outFormat = oracledb.OBJECT

  return {
    selectStream
  }

  async function selectStream(sql, bindings = [], fetchSize = fetchArraySize) {
    let connection = await knex.client.acquireConnection()
    log.info(`Fetch size is set to ${fetchSize}`)
    let select = connection.queryStream(sql, bindings, {
      fetchArraySize: fetchSize,
      outFormat: outFormat
    })
    select.on('error', (err) => {
      log.error('Oracle Error Event', err)
      knex.client.releaseConnection(connection)
    })
    select.on('end', () => {
      log.info('Destroying the Stream')
      select.destroy()
    })
    select.on('close', () => {
      log.info('Oracle Close Event')
      knex.client.releaseConnection(connection)
      select = null
      connection = null
    })
    return select
  }
}
My index/stream pipeline code
async function indexJob() {
  const reindexStartTime = new moment().local()
  let rowCount = 0
  log.info('Reindex Started at', reindexStartTime.format())
  let queryNumber = 1
  const partitionedQueries = ['Select * from table where 1=1', 'Select * from table where 2=2', 'Select * from table where 3=3'] // There would be 20 queries in this array
  let partitionedQueriesLength = partitionedQueries.length
  while (partitionedQueries.length > 0) {
    let query = partitionedQueries.pop()
    log.info('RUNNING Query', {
      queryNumber: `${queryNumber++} of ${partitionedQueriesLength}`,
      query: query
    })
    let databaseStream = await oracleStream.selectStream(query, [], 10000) // 10k represents the oracle fetch size
    databaseStream.on('data', () => {
      rowCount++
    })
    let logEveryFiveSec = setInterval(() => {
      log.info('Status: ', getReindexInfo(reindexStartTime, rowCount))
    }, 5000)
    try {
      let pipeline = util.promisify(stream.pipeline)
      await pipeline(
        databaseStream,
        camelizeAndStringify(),
        streamReindex(core)
      )
    } catch (err) {
      databaseStream.destroy(err)
      throw new JobFailedError(err)
    } finally {
      databaseStream.destroy()
      clearInterval(logEveryFiveSec)
    }
  }
}

function camelizeAndStringify() {
  let first = true
  const serialize = new Transform({
    objectMode: true,
    highWaterMark: 1000,
    transform(chunk, encoding, callback) {
      if (first) {
        this.push('[' + JSON.stringify(camelize(chunk)))
        first = false
      } else {
        this.push(',' + JSON.stringify(camelize(chunk)))
      }
      callback()
      chunk = null
    },
    flush(callback) {
      this.push(']')
      callback()
    }
  })
  return serialize
}

function streamReindex(core) {
  const updateUrl = baseUrl + core + '/update'
  const options = {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json'
    },
    'auth': `${user.username}:${user.password}`,
  }
  let postStream = https.request(updateUrl, options, (res) => {
    let response = {
      status: {
        code: res.statusCode,
        message: res.statusMessage
      },
      headers: res.headers,
    }
    if (res.statusCode !== 200) {
      postStream.destroy(new Error(JSON.stringify(response)))
    }
  })
  postStream.on('error', (err) => {
    throw new Error(err)
  })
  postStream.on('socket', (socket) => {
    socket.setKeepAlive(true, 110000)
  })
  return postStream
}
EDIT 1:
I tried removing knex from the equation by making a single connection to the db with the oracledb library directly. Unfortunately, I still see the same behavior.
This is how I changed my select to not use knex:
async function selectStream(sql, bindings = [], fetchSize = fetchArraySize) {
  const connectionInfo = {
    user: info.user,
    password: info.password,
    connectString: info.host + '/' + info.database
  }
  const connection = await oracledb.getConnection(connectionInfo)
  log.info('Connection was successful!')
  log.info(`Fetch size is set to ${fetchSize}`)
  let select = connection.queryStream(sql, bindings, {
    fetchArraySize: fetchSize,
    outFormat: outFormat
  })
  select.on('error', async (err) => {
    log.error('Oracle Error Event', err)
    await connection.close()
  })
  select.on('end', () => {
    log.info('Destroying the Stream')
    select.destroy()
  })
  select.on('close', async () => {
    log.info('Oracle Close Event')
    await connection.close()
  })
  return select
}
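Not a diagnosis of the leak itself, but a sketch of how to narrow down where the memory is held (assuming nothing about the cause): log process.memoryUsage() between the partitioned queries, and diff heap snapshots taken across two of the jumps in Chrome DevTools.
const v8 = require('v8')

// Log coarse memory numbers, e.g. logMemory(`after query ${queryNumber}`)
// inside the while loop of indexJob().
function logMemory(label) {
  const { rss, heapUsed, external } = process.memoryUsage()
  const mb = (n) => `${(n / 1024 / 1024).toFixed(1)} MB`
  console.log(`${label}: rss=${mb(rss)} heapUsed=${mb(heapUsed)} external=${mb(external)}`)
}

// Dump a .heapsnapshot file to open in Chrome DevTools (Memory tab).
// Note: v8.writeHeapSnapshot() needs Node >= 11.13, slightly newer than
// the ~11.10.1 pinned above.
function dumpHeap(label) {
  const file = v8.writeHeapSnapshot()
  console.log(`${label}: heap snapshot written to ${file}`)
}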

How to run multiple select queries at once in Sequelize using a single await

I have to run both queries at the same time: fire them together rather than one after the other, and then wait for both executions to complete.
products = await Product.findAll()
  .then(data => {
    return data;
  })
  .catch(error => {
    //
  });

variationProducts = await VariationProduct.findAll()
  .then(data => {
    return data;
  })
  .catch(error => {
    //
  });
You may choose
const [products, variationProducts] = await Promise.all([Product.findAll(), VariationProduct.findAll()]);
OR kick both queries off first and await them afterwards:
const productsPromise = Product.findAll();
const variationProductsPromise = VariationProduct.findAll();
const products = await productsPromise;
const variationProducts = await variationProductsPromise;
try {
  const [products, variationProducts] = await Promise.all([
    Product.findAll(),
    VariationProduct.findAll()
  ]);
  // Do what you need with the result;
} catch (e) {
  console.error('Problem in getting data', e);
  throw e; // Or do what you want.
}
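One general caveat (plain Promise semantics, nothing Sequelize-specific): Promise.all rejects as soon as either query rejects, and the other result is discarded. If both outcomes are wanted regardless of failures, Promise.allSettled is an option:
const [productsResult, variationProductsResult] = await Promise.allSettled([
  Product.findAll(),
  VariationProduct.findAll()
]);
// Each entry is { status: 'fulfilled', value } or { status: 'rejected', reason }
if (productsResult.status === 'fulfilled') {
  console.log(productsResult.value.length);
}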
You can do it like this:
Promise.all([Product.findAll(), VariationProduct.findAll()]).then(data => {
  // data[0] is products
  // data[1] is variationProducts
}).catch(error => {
  // oops some error
});
const productsPromise = Product.findAll();
const variationProductsPromise = VariationProduct.findAll();
const products = await productsPromise;
const variationProducts = await variationProductsPromise;

Seeding MongoDB data in Node.js by referencing ObjectId

I'm using mongoose-data-seed to seed data into MongoDB; however, it has no mechanism for passing ObjectId() references between seed files.
I found a way to store the output of each seeder in a JSON file and retrieve the ObjectIds from the previous seeds in the current seeder. This way I can reference ObjectIds from previous seeders.
seeding-helper.js
const fs = require('fs');
// const path = require('path');

const seedersTmpDataFolder = 'seeders/bin';

class SeedingHelper {
  static saveData(filename, data) {
    return new Promise((resolve) => {
      fs.writeFile(`${seedersTmpDataFolder}/${filename}.json`, JSON.stringify(data, null, '\t'), (err) => {
        if (err) throw err;
        resolve();
      });
    });
  }

  static readData(filename) {
    return new Promise((resolve) => {
      fs.readFile(`${seedersTmpDataFolder}/${filename}.json`, 'utf8', (err, data) => {
        if (err) throw err;
        resolve(JSON.parse(data));
      });
    });
  }
}

module.exports = SeedingHelper;
resourceActions.seeder.js
const { Seeder } = require('mongoose-data-seed');
const mongoose = require('mongoose');
const ResourceAction = require('../models/resourceAction');
const SeedingHelper = require('../helpers/seeding-helper');

const { Types: { ObjectId } } = mongoose;

const data = [
  {
    _id: ObjectId(),
    name: 'test1'
  },
  {
    _id: ObjectId(),
    name: 'test2'
  },
];

class ResourceActionSeeder extends Seeder {
  async shouldRun() { // eslint-disable-line class-methods-use-this
    return ResourceAction.count().exec().then(count => count === 0);
  }

  async run() { // eslint-disable-line class-methods-use-this
    // Save the generated ids first so later seeders can read them back.
    await SeedingHelper.saveData('resourceActions', data);
    return ResourceAction.create(data);
  }
}

module.exports = ResourceActionSeeder;
resources.seeder.js
const { Seeder } = require('mongoose-data-seed');
const mongoose = require('mongoose');
const Resource = require('../models/resource');
const SeedingHelper = require('../helpers/seeding-helper');

const { Types: { ObjectId } } = mongoose;

class ResourcesSeeder extends Seeder {
  async shouldRun() { // eslint-disable-line class-methods-use-this
    return Resource.count().exec().then(count => count === 0);
  }

  async run() { // eslint-disable-line class-methods-use-this
    // Read back the ids written by the resourceActions seeder.
    const resourceActionsData = await SeedingHelper.readData('resourceActions');
    const machinesId = ObjectId();
    const actionTest1 = ObjectId(resourceActionsData.find(x => x.name === 'test1')._id);
    const actionTest2 = ObjectId(resourceActionsData.find(x => x.name === 'test2')._id);
    const data = [
      {
        _id: machinesId,
        name: 'machines',
        actions: [
          actionTest1,
          actionTest2,
        ],
      },
    ];
    // Await the create so the result check is meaningful (an unawaited
    // promise is always truthy), then persist this seeder's output too.
    const result = await Resource.create(data);
    if (result) await SeedingHelper.saveData('resources', data);
    return result;
  }
}

module.exports = ResourcesSeeder;
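For this handoff to work the order matters: ResourceActionSeeder must finish before ResourcesSeeder reads seeders/bin/resourceActions.json. With mongoose-data-seed that order comes from the seeders list in md-seed-config.js; the shape below is a sketch from memory of the library and should be checked against its docs:
// md-seed-config.js (sketch; verify against mongoose-data-seed's docs)
const ResourceActions = require('./seeders/resourceActions.seeder');
const Resources = require('./seeders/resources.seeder');

module.exports = {
  // Seeders run in the order listed, so actions are created before resources.
  seedersList: {
    ResourceActions,
    Resources,
  },
};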
