Exponential memory growth in Node.js, memory leak? - node.js

I'm making a script to fetch blockchain transactions, however the memory usage of the process grows until it reaches around 4 gigabytes and the process crashes. I suspect a memory leak, possibly related to the garbage collection mechanism.
async function getTransactions(blockNumber) {
    if (blockNumber < 15000000) {
        let block = await web3.eth.getBlock(blockNumber);
        for (let i = 0; i < block.transactions.length; i++) {
            let transaction = await web3.eth.getTransactionReceipt(block.transactions[i]);
            for (let j = 0; j < transaction.logs.length; j++) {
                try {
                    // keccak256("Transfer(address,address,uint256)") -- the ERC-20 Transfer event topic
                    if (transaction.logs[j].topics[0] == "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef") {
                        let parsed1 = transaction.logs[j].topics[1].replace("0x", "").replace(/^0+/, '');
                        let parsed2 = transaction.logs[j].topics[2].replace("0x", "").replace(/^0+/, '');
                        let contract = new web3.eth.Contract(erc20_abi, transaction.logs[j].address);
                        let decimals = await contract.methods.decimals().call();
                        let transfer = {
                            from: "0x" + parsed1,
                            to: "0x" + parsed2,
                            token: transaction.logs[j].address,
                            amount: parseInt(transaction.logs[j].data) / (10 ** decimals),
                            block: block.number,
                            timestamp: block.timestamp,
                            hash: transaction.logs[j].transactionHash,
                        };
                        console.log(transfer);
                        await createTransfer(transfer);
                    }
                } catch (error) {
                }
            }
        }
    }
}

(async () => {
    await connectdb();
    for (let i = await findMax(); i < 15000000; i++) {
        await getTransactions(i);
    }
})();
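One way to confirm that memory really is accumulating per block (a diagnostic sketch, not part of the original post) is to log process.memoryUsage() every so often in the driver loop:

(async () => {
    await connectdb();
    for (let i = await findMax(); i < 15000000; i++) {
        await getTransactions(i);
        // Diagnostic sketch: print heap usage every 100 blocks
        if (i % 100 === 0) {
            const { heapUsed, rss } = process.memoryUsage();
            console.log(`block ${i}: heapUsed ${(heapUsed / 1e6).toFixed(1)} MB, rss ${(rss / 1e6).toFixed(1)} MB`);
        }
    }
})();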

Related

How can I reduce the running time to fit the 1s time limit

I was doing practice problems on CodeChef and I was in the flow: I solved one question, then another, and so on, working through all the runtime, compile-time and logical errors.
Then suddenly one error occurred which is too hard for me to solve and I need some guidance on it.
The Node.js code whose running time I want to reduce is as follows:
process.stdin.resume();
process.stdin.setEncoding('utf8');
// your code goes here
let inputString = '';
let currentLine = 0;

process.stdin.on('data', (input) => {
    inputString += input;
});

process.stdin.on('end', () => {
    inputString = inputString.split('\n');
    main();
});

const readline = () => {
    return inputString[currentLine++];
}

const main = () => {
    let t = parseInt(readline());
    while (t--) {
        let temp = readline().split('');
        let count = 0;
        for (let i = 0; i < temp.length; i++) {
            if (temp[i] === '4') {
                count++;
            }
        }
        console.log(count);
    }
}
Please help me with it ...

Error about await that can't be used in non-async functions

I was making a play.js command when it started throwing this error:
SyntaxError: await is only valid in async function
Here is the code:
for (let k = 0; k < 2; k++) {
    // I am unable to make it dynamic so lets take 2 as an example
    await dmb.page.waitForSelector(
        "a > div > div.Igw0E.rBNOH.YBx95.ybXk5._4EzTm.soMvl > div[class]"
    );
    await dmb.page.click(
        "a > div > div.Igw0E.rBNOH.YBx95.ybXk5._4EzTm.soMvl > div[class]"
    );
    await dmb.page.waitFor(2000);
    for (let i = 0; i < textbycomma.length; i++) {
        let type = textbycomma[i];
        await dmb.page.waitForSelector('textarea[placeholder="Message..."]');
        await dmb.page.type('textarea[placeholder="Message..."]', type, {
            delay: 100,
        });
        let ig = await dmb.page.$x('//button[contains(text(), "Send")]');
        await ig[0].click();
        await dmb.page.waitFor(2000);
        let readurls = [];
        readurls[i] = dmb.page.url();
        console.log(readurls);
        if ((i = textbycomma.length)) {
            break;
        }
    }
    await dmb.page.goto("https://page.com/page-example");
}
await dmb.browser.close();
Unlike Python, you can't just indent and expect your code to wrap inside the function. You need to use curly braces.
exports.run = async (...args) => {
    // Your code
};
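For example, the loop from the question would need to live inside such an async function (a sketch; the parameter names are assumptions, everything else comes from the question):

exports.run = async (dmb, textbycomma) => {
    // All the awaits are now inside an async function
    for (let k = 0; k < 2; k++) {
        await dmb.page.waitForSelector(
            "a > div > div.Igw0E.rBNOH.YBx95.ybXk5._4EzTm.soMvl > div[class]"
        );
        // ... rest of the loop from the question ...
    }
    await dmb.browser.close();
};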

Batch 500 writes into firestore loop from json file

Using some inspiration I got from this thread and reply, I tried to get my loop working, which is meant to write into Firestore in batches. But somehow I can only update 1 document, even though I can see that I iterate through different values from my array. I load data into an array and work from there.
const db = admin.firestore();
const jsonStream = StreamArray.withParser();
let arr = [];

jsonStream.on('data', ({ key, value }) => {
    arr.push(value);
});

jsonStream.on('end', () => {
    var counter = 0;
    var commitCounter = 0;
    var batches = [];
    arr.forEach((a, ind) => {
        batches[commitCounter] = db.batch();
        if (counter <= 498) {
            var thisRef = db.collection('Testing').doc(a.id);
            console.log("id");
            console.log(a.id);
            batches[commitCounter].set(thisRef, { ...a });
            counter = counter + 1;
        } else {
            counter = 0;
            commitCounter = commitCounter + 1;
            batches[commitCounter] = db.batch();
        }
    });
    for (var i = 0; i < batches.length; i++) {
        if (i == 0) {
            console.log(batches[0]);
        }
        batches[i].commit().then(function () {
            console.count('wrote batch');
        });
    }
});

const filename = path.join(__dirname, 'mydata.json');
fs.createReadStream(filename).pipe(jsonStream.input);
The following line gets executed on each iteration, which essentially "resets" your batch on each round:
batches[commitCounter] = db.batch();
So at the end each of your batches will only contain one document write.
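A sketch of one way to restructure the loop so that a new batch is only created every 500 writes (this keeps the question's variable names but is not taken from the original answer; Firestore allows at most 500 operations per batch):

jsonStream.on('end', () => {
    var batches = [db.batch()];
    var counter = 0;
    arr.forEach((a) => {
        // Start a fresh batch once the current one is full
        if (counter === 500) {
            batches.push(db.batch());
            counter = 0;
        }
        var thisRef = db.collection('Testing').doc(a.id);
        batches[batches.length - 1].set(thisRef, { ...a });
        counter = counter + 1;
    });
    Promise.all(batches.map((b) => b.commit()))
        .then(() => console.log('wrote ' + batches.length + ' batches'));
});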

How to handle a long request with Express

I'm working on a simple function for a specific GET request triggered in the browser. The objective of this request is to make multiple queries to a MongoDB (Mongoose) database, then perform some calculations and structure the results before sending them back to the browser.
The only problem is that everything takes too long and it results in an error in the browser:
net::ERR_EMPTY_RESPONSE
To give an example, here is part of the function I'm trying to build:
async function getPriceByMake(makes, id) {
    return new Promise(async (resolve, reject) => {
        let pMakes = {};
        const makesArr = Object.keys(makes);
        for (let i = 0; i < makesArr.length; i++) {
            console.log('Getting the docs ... ' + Math.round(i / makesArr.length * 100) + '%');
            const currMake = makesArr[i];
            pMakes[currMake] = {};
            const modelsArr = Object.keys(makes[currMake]);
            for (let j = 0; j < modelsArr.length; j++) {
                const currModel = modelsArr[j];
                await Listing.find({ catFrom: id, model: currModel }, 'year asking', (err, docs) => {
                    if (docs.length > 1) {
                        pMakes[currMake][currModel] = [docs];
                    } else {
                        pMakes[currMake][currModel] = {};
                    }
                });
            }
        }
        resolve(pMakes);
    });
}
In this function, if I leave the async / await out, I get an empty {} on the other end, which is obviously not the objective.
I've been searching the web a little and found an article pointing to this scheme:
Browser:
  Initiates request
  Displays progress
  Shows result
WebServer:
  Submits event
  Checks for completion
  Returns result
BackEndApp:
  Picks up event
  Runs task
  Returns results
My question is the following:
How can I do that with NodeJS and Express?
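For reference, a minimal sketch of that scheme with Express (route paths and the in-memory job store are made up for illustration; this is not from the answer below): the first request only submits the work and returns a job id, the task runs in the background, and the browser polls a status endpoint until the result is ready.

const express = require('express');
const crypto = require('crypto');

const app = express();
app.use(express.json());

const jobs = new Map(); // jobId -> { status, result }

// Browser initiates the request; the server only submits the job
app.post('/prices', (req, res) => {
    const jobId = crypto.randomUUID();
    jobs.set(jobId, { status: 'running', result: null });

    // Run the slow work in the background (getPriceByMake as in the question)
    getPriceByMake(req.body.makes, req.body.id)
        .then((result) => jobs.set(jobId, { status: 'done', result }))
        .catch((err) => jobs.set(jobId, { status: 'failed', result: String(err) }));

    res.status(202).json({ jobId }); // respond immediately
});

// Browser polls until the job is complete, then shows the result
app.get('/prices/:jobId', (req, res) => {
    const job = jobs.get(req.params.jobId);
    if (!job) return res.status(404).end();
    res.json(job);
});

app.listen(3000);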
In this code:
for (let j = 0; j < modelsArr.length; j++) {
    const currModel = modelsArr[j];
    await Listing.find({ catFrom: id, model: currModel }, 'year asking', (err, docs) => {
        if (docs.length > 1) {
            pMakes[currMake][currModel] = [docs];
        } else {
            pMakes[currMake][currModel] = {};
        }
    });
}
Your await isn't working because you're passing a callback to Listing.find(). When you do that, it does NOT return a promise and therefore the await does nothing useful. You get the empty response because the await doesn't work and thus you call resolve() before there's any actual data there.
Change the code to this:
for (let j = 0; j < modelsArr.length; j++) {
    const currModel = modelsArr[j];
    let docs = await Listing.find({ catFrom: id, model: currModel }, 'year asking');
    if (docs.length > 1) {
        pMakes[currMake][currModel] = [docs];
    } else {
        pMakes[currMake][currModel] = {};
    }
}
And then the await will work properly.
You also should remove the return new Promise() wrapper. You don't want that. Just make the function async and use await and it will already return a promise.
Here's your function with the unnecessary promise wrapper removed:
async function getPriceByMake(makes, id) {
    let pMakes = {};
    const makesArr = Object.keys(makes);
    for (let i = 0; i < makesArr.length; i++) {
        console.log('Getting the docs ... ' + Math.round(i / makesArr.length * 100) + '%');
        const currMake = makesArr[i];
        pMakes[currMake] = {};
        const modelsArr = Object.keys(makes[currMake]);
        for (let j = 0; j < modelsArr.length; j++) {
            const currModel = modelsArr[j];
            let docs = await Listing.find({ catFrom: id, model: currModel }, 'year asking');
            if (docs.length > 1) {
                pMakes[currMake][currModel] = [docs];
            } else {
                pMakes[currMake][currModel] = {};
            }
        }
    }
    return pMakes;
}
Then, keep in mind that whatever code sends your actual response needs to use .then() or await when calling this async function in order to get the final result.
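For example, a sketch of an Express handler calling it with await (the route path and where makes comes from are assumptions, not part of the original answer):

app.get('/prices/:id', async (req, res) => {
    try {
        // `makes` is assumed to be available in scope, as in the question
        const pMakes = await getPriceByMake(makes, req.params.id);
        res.json(pMakes);
    } catch (err) {
        res.status(500).json({ error: err.message });
    }
});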
Your best bet to speed up this code would be to refactor either your queries or your database structure or both to not have to do N * M separate queries to get your final result. That's likely where your slowness is coming from. The biggest performance gains will probably come from reducing the number of queries you have to run here to far fewer.
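As a sketch of that idea (not from the original answer, and it assumes the Listing schema has a model field that works with $in): fetch all models of a make in one query, then group in memory. Note the projection has to include model so the grouping can be done afterwards.

async function getPriceByMake(makes, id) {
    let pMakes = {};
    for (const currMake of Object.keys(makes)) {
        const modelsArr = Object.keys(makes[currMake]);
        // One query per make instead of one per model
        const docs = await Listing.find(
            { catFrom: id, model: { $in: modelsArr } },
            'year asking model'
        );
        pMakes[currMake] = {};
        for (const currModel of modelsArr) {
            const forModel = docs.filter((d) => d.model === currModel);
            pMakes[currMake][currModel] = forModel.length > 1 ? [forModel] : {};
        }
    }
    return pMakes;
}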
Depending upon your database configuration and capabilities, it might speed things up to run the inner loop queries in parallel as shown here:
async function getPriceByMake(makes, id) {
    let pMakes = {};
    const makesArr = Object.keys(makes);
    for (let i = 0; i < makesArr.length; i++) {
        console.log('Getting the docs ... ' + Math.round(i / makesArr.length * 100) + '%');
        const currMake = makesArr[i];
        pMakes[currMake] = {};
        const modelsArr = Object.keys(makes[currMake]);
        await Promise.all(modelsArr.map(async currModel => {
            let docs = await Listing.find({ catFrom: id, model: currModel }, 'year asking');
            if (docs.length > 1) {
                pMakes[currMake][currModel] = [docs];
            } else {
                pMakes[currMake][currModel] = {};
            }
        }));
    }
    return pMakes;
}
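If fully parallel queries overwhelm the database, a middle ground is to cap concurrency by running the model queries in small chunks (a sketch; findModelsInChunks and the chunk size of 5 are made up for illustration):

async function findModelsInChunks(modelsArr, id, chunkSize = 5) {
    const results = {};
    for (let start = 0; start < modelsArr.length; start += chunkSize) {
        const chunk = modelsArr.slice(start, start + chunkSize);
        // At most chunkSize queries are in flight at any time
        await Promise.all(chunk.map(async (currModel) => {
            const docs = await Listing.find({ catFrom: id, model: currModel }, 'year asking');
            results[currModel] = docs.length > 1 ? [docs] : {};
        }));
    }
    return results;
}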

NodeJS: How to wait until an HTTP GET request is complete inside a for loop?

I have a for loop in NodeJS. I would like to wait until the result of the HTTP GET request is complete in the for loop before executing the next iteration. How do I achieve that?
for (let k = 0; k < fd.length; k++) {
    url = fd[k].nct_id;
    HttpSearch({condition: url}).then(trials => {
        // Get the result first before executing the next iteration
        console.log(trials);
    });
}
You should make the for-loop async:
const main = async () => {
    for (let k = 0; k < fd.length; k++) {
        const url = fd[k].nct_id;
        const trials = await HttpSearch({ condition: url });
        console.log(trials);
    }
};

main().catch(console.error);
This will cause the loop to "pause" at each HttpSearch.
I would do it like this:
// Note: this must run inside an async function for await to be allowed
let k = 0;
let len = fd.length;
for (; k < len;) {
    let url = fd[k].nct_id;
    let subs = await HttpSearch({ condition: url });
    console.log(subs);
    k++;
}
or like this with a promise chain:
let url;
let promiseChain = Promise.resolve();
for (let i = 0; i < fd.length; i++) {
    url = fd[i].nct_id;
    // you need to pass the current value of `url`
    // into the chain manually, to avoid having its value
    // changed before the .then code accesses it.
    const makeNextPromise = (url) => () => {
        // return the promise here so the chain waits for it
        return HttpSearch({ condition: url })
            .then((result) => {
                return result;
            });
    };
    promiseChain = promiseChain.then(makeNextPromise(url));
}
This uses recursion, which calls the next request once the previous one has finished:
var limit = fd.length;
var counter = 0;

HttpSearch({ condition: fd[0].nct_id }).then(yourCallBack);

function yourCallBack(trials) {
    console.log(trials);
    counter++;
    if (counter === limit)
        return console.log('Done');
    HttpSearch({ condition: fd[counter].nct_id }).then(yourCallBack);
}
