MongoError: connection 4 to cluster closed - node.js

I have a function in Node.js that uses the Mongoose driver, like the one below:
Pseudocode:
function someFn(someParams) {
  // Step 1: a couple of very fast mongo queries (milliseconds each)
  // Step 2: heavy CPU processing: millions of records grouped, mapped, etc. (takes about a minute)
  // Step 3: another mongo query that inserts the results from Step 2 into a collection
}
At step 3, I get the following error:
MongoError: connection 4 to cluster closed
at Function.MongoError.create (/home/some-user/my-repo/node_modules/mongodb-core/lib/error.js:29:11)
at TLSSocket.<anonymous> (/home/some-user/my-repo/node_modules/mongodb-core/lib/connection/connection.js:202:22)
at Object.onceWrapper (events.js:293:19)
at emitOne (events.js:101:20)
at TLSSocket.emit (events.js:191:7)
at _handle.close (net.js:513:12)
at Socket.done (_tls_wrap.js:332:7)
at Object.onceWrapper (events.js:293:19)
at emitOne (events.js:101:20)
at Socket.emit (events.js:191:7)
at TCP._handle.close [as _onclose] (net.js:513:12)
My MongoDB connection params are as follows:
mongoose.connect(connStr, {
  server: {
    socketOptions: {
      keepAlive: 300000,
      connectTimeoutMS: 300000,
      socketTimeoutMS: 300000,
      auto_reconnect: true
    }
  }
});
I don't understand why I'm getting this error at Step 3. Can someone help me out with this, please?

Figured out the issue after hours of debugging. My Step 3 mongoose query inserted too many documents (on the order of millions, from Step 2). The error from mongoose gives no reason why the connection is closing; a message like "Too many documents" or "Query too large" would have gone a long way toward saving a lot of time.
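If you hit the same wall, one workaround is to split the insert into fixed-size batches so that no single query carries millions of documents. A minimal sketch, assuming a mongoose model named Result and an array docs holding the Step 2 output (both names are hypothetical):

// Hypothetical names: Result (a mongoose model), docs (the Step 2 output).
// Runs inside an async function.
const BATCH = 10000; // assumed batch size; tune to your document size
for (let i = 0; i < docs.length; i += BATCH) {
  // ordered: false lets the insert continue past individual failures
  await Result.insertMany(docs.slice(i, i + BATCH), { ordered: false });
}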

Related

Inserting json into postgresql using a for loop doesn't insert all my data?

I've been working on inserting my JSON data into Google Cloud PostgreSQL for a few days now, and I'm running into an issue where nowhere near all of my data gets inserted. Here is my code:
// prior to this I connect to the cloud using pg and create the tables InterestClubs and FilterClubs
// alldata is a JSON file: an array of about 3000 objects
let count = 0; // incremented on every pass through the loop
for (const club of alldata) {
  client.query("INSERT INTO InterestClubs (name, clublink, university, description, logo, interests) VALUES ('"+club.title+"', '"+club.clubLink+"', '"+club.university+"', '"+club.descriptionSnippet+"', '"+club.logoLink+"', '"+club.interests+"')");
  client.query("INSERT INTO FilterClubs (name, clublink, university, description, logo, polfilters, relfilters, culfilters) VALUES ('"+club.title+"', '"+club.clubLink+"', '"+club.university+"', '"+club.descriptionSnippet+"', '"+club.logoLink+"', '"+club.politicalFilters+"', '"+club.religiousFilters+"', '"+club.culturalFilters+"')");
  count++;
}
console.log(count); // outputs 3000 (or however many clubs there are in the JSON file)
I seem to be successfully looping through the data 3000 times (leading me to believe that I have inserted 3000 objects), but when I run a query such as SELECT * FROM InterestClubs (using either node/express or the Cloud Shell), I only get 19 rows back. I thought it might have something to do with waiting for the client.query calls to connect and insert, but when I used async/await (awaiting each query), nothing changed. Also, I get this error every time I try to insert (after the count is printed):
3611
events.js:174
throw er; // Unhandled 'error' event
^
error: syntax error at or near "s"
at Connection.parseE (C:\Users\User\Documents\Code\Personal_Projects\clubhaus\node_modules\pg\lib\connection.js:539:11)
at Connection.parseMessage (C:\Users\User\Documents\Code\Personal_Projects\clubhaus\node_modules\pg\lib\connection.js:366:17)
at Socket.<anonymous> (C:\Users\User\Documents\Code\Personal_Projects\clubhaus\node_modules\pg\lib\connection.js:105:22)
at Socket.emit (events.js:198:13)
at addChunk (_stream_readable.js:288:12)
at readableAddChunk (_stream_readable.js:269:11)
at Socket.Readable.push (_stream_readable.js:224:10)
at TCP.onStreamRead [as onread] (internal/stream_base_commons.js:94:17)
Emitted 'error' event at:
at Query.handleError (C:\Users\User\Documents\Code\Personal_Projects\clubhaus\node_modules\pg\lib\query.js:108:8)
at Connection.emit (events.js:198:13)
at Socket.<anonymous> (C:\Users\User\Documents\Code\Personal_Projects\clubhaus\node_modules\pg\lib\connection.js:109:12)
at Socket.emit (events.js:198:13)
[... lines matching original stack trace ...]
at TCP.onStreamRead [as onread] (internal/stream_base_commons.js:94:17)
This makes me think that something about the 20th object (the first one missing) could be triggering a syntax error, causing the inserts to stop while the loop keeps running, but I'm not sure. Any help would be appreciated!
Check if that 20th object has unescaped quote characters in one of the properties you are using. If you are using the npm package pg, you can escape those characters automatically by passing the values as parameters to your insert statement, like this:
client.query("INSERT INTO InterestClubs (name, clublink, university, description, logo, interests) VALUES ($1, $2, $3, $4, $5, $6)", [club.title, club.clubLink, club.university, club.descriptionSnippet, club.logoLink, club.interests])

MongoDB Cursor Not Found [duplicate]

This question already has answers here:
MongoDB - Error: getMore command failed: Cursor not found
(6 answers)
Closed 4 years ago.
I am using MongoDB (via the mongoose module) with Node.js,
and I am processing around 10,000,000 documents in batches of 1,000 each, using the limit and skip functionality.
The processing works fine, but after some time it gives me this error:
{ MongoError: Cursor not found, cursor id: 62783806111
at Function.MongoError.create (/home/admin/Pictures/duplicayProj1/node_modules/mongoose/node_modules/mongodb-core/lib/error.js:31:11)
at /home/admin/Pictures/duplicayProj1/node_modules/mongoose/node_modules/mongodb-core/lib/connection/pool.js:483:72
at authenticateStragglers (/home/admin/Pictures/duplicayProj1/node_modules/mongoose/node_modules/mongodb-core/lib/connection/pool.js:429:16)
at Connection.messageHandler (/home/admin/Pictures/duplicayProj1/node_modules/mongoose/node_modules/mongodb-core/lib/connection/pool.js:463:5)
at Socket.<anonymous> (/home/admin/Pictures/duplicayProj1/node_modules/mongoose/node_modules/mongodb-core/lib/connection/connection.js:339:20)
at emitOne (events.js:115:13)
at Socket.emit (events.js:210:7)
at addChunk (_stream_readable.js:252:12)
at readableAddChunk (_stream_readable.js:239:11)
at Socket.Readable.push (_stream_readable.js:197:10)
at TCP.onread (net.js:589:20)
name: 'MongoError',
message: 'Cursor not found, cursor id: 62783806111',
ok: 0,
errmsg: 'Cursor not found, cursor id: 62783806111',
code: 43 }
Can anyone tell me what the actual problem is? I am not using anything that refers to a cursor explicitly.
Thanks in advance
This normally happens because your cursor times out if it is idle for too long. Check out noCursorTimeout. Just make sure you close the cursor when you are finished.
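A minimal sketch of that flag in use with the native driver's cursor API (the collection name docs and the empty query are placeholders; mongoose exposes the same underlying cursor via Query#cursor()):

// Sketch: iterate with a cursor instead of limit/skip, with noCursorTimeout set.
// Runs inside an async function.
const cursor = db.collection('docs')
  .find({})
  .addCursorFlag('noCursorTimeout', true) // server will not reap this cursor when idle
  .batchSize(1000);
try {
  while (await cursor.hasNext()) {
    const doc = await cursor.next();
    // ...process doc...
  }
} finally {
  await cursor.close(); // required: no-timeout cursors are never cleaned up automatically
}

As a side benefit, a cursor avoids the cost of skip, which rescans from the start of the result set on every batch.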

Getting the widths of a lot of images causes ENOTFOUND in Node.js

I have several sources of lists of images (Flickr, images stored on S3, imgur, etc.).
I want to get the dimensions of these images.
I use Node and https://github.com/nodeca/probe-image-size to go over each URL, get the width of the image, and count how many images fall at each width, via the following code:
const _ = require('lodash');
const probe = require('probe-image-size');

const probes = [];
_.forEach(image_urls, url => {
  probes.push(probe(url));
});
const results = await Promise.all(probes);

const widths = {};
_.forEach(results, result_of_image => {
  // bucket each width to the nearest 10px
  const width = Math.floor(result_of_image.width / 10) * 10;
  if (!widths[width]) {
    widths[width] = 1;
  } else {
    widths[width]++;
  }
});
Even though all the URLs are accessible, I sometimes get getaddrinfo ENOTFOUND with the following stack:
at ClientRequest.req.once.err (/image_script/node_modules/got/index.js:73:21)
at Object.onceWrapper (events.js:293:19)
at emitOne (events.js:101:20)
at ClientRequest.emit (events.js:191:7)
at TLSSocket.socketErrorListener (_http_client.js:358:9)
at emitOne (events.js:96:13)
at TLSSocket.emit (events.js:191:7)
at connectErrorNT (net.js:1031:8)
at _combinedTickCallback (internal/process/next_tick.js:80:11)
at process._tickDomainCallback (internal/process/next_tick.js:128:9)
I suspect that because the URL list is very large (in the thousands), Node just takes all the resources of the system and things stop working properly (this is a guess).
Is there a better way to do the above, or a way to give Node some kind of connection pool?
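One common fix is to cap how many probes are in flight at once instead of launching all of them into a single Promise.all. A minimal sketch using the p-limit package (the library choice and the limit of 20 are assumptions; manual batching would work too):

const pLimit = require('p-limit');
const probe = require('probe-image-size');

const limit = pLimit(20); // at most 20 requests in flight at a time
// Each probe is wrapped so it only starts when a slot frees up.
const results = await Promise.all(image_urls.map(url => limit(() => probe(url))));

This keeps the number of open sockets and in-flight DNS lookups bounded, which is usually what makes sporadic getaddrinfo ENOTFOUND errors disappear.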

MongoDB: Server sockets closed after a few minutes

I am working with multiple AWS instances connected to the same mongo database (inside a Compose.io Elastic deployment), but I keep getting the error server <url>:<port> sockets closed after a few minutes. Can anyone give me a hint about what may be wrong with the connection code?
CONNECTION CODE
var url = "mongodb://<user>:<password>#<url1>:<port1>,<url2>:<port2>/<dbName>?replicaSet=<replicaSetName>";
var options = {
  server: { "socketOptions.keepAlive": 1 },
  replSet: { "replicaSet": <replicaSetName>, "socketOptions.keepAlive": 1 }
};
MongoClient.connect(url, options, function(err, db) { ... });
ERROR MESSAGE
Potentially unhandled rejection [2] MongoError: server <url>:<port> sockets closed
at null.<anonymous> (/var/app/current/node_modules/mongodb/node_modules/mongodb-core/lib/topologies/server.js:328:47)
at g (events.js:199:16)
at emit (events.js:110:17)
at null.<anonymous> (/var/app/current/node_modules/mongodb/node_modules/mongodb-core/lib/connection/pool.js:101:12)
at g (events.js:199:16)
at emit (events.js:110:17)
at Socket.<anonymous> (/var/app/current/node_modules/mongodb/node_modules/mongodb-core/lib/connection/connection.js:142:12)
at Socket.g (events.js:199:16)
at Socket.emit (events.js:107:17)
at TCP.close (net.js:485:12)
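No answer is recorded here, but one thing stands out: the node driver documentation of that era writes socket options as nested objects rather than dotted string keys, so the keepAlive setting above may never actually be applied. A sketch of the nested form (whether this fixes the disconnects is an assumption):

// Sketch: the same options written as nested objects, the shape the 2.x driver documents.
var options = {
  server: { socketOptions: { keepAlive: 1 } },
  replSet: { replicaSet: "<replicaSetName>", socketOptions: { keepAlive: 1 } }
};
MongoClient.connect(url, options, function(err, db) { /* ... */ });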

NodeLoad module gives Error : Object #<Client> has no method 'destroy'

I am using the NodeLoad module for Node.js to send multiple requests at the same time, to load-test a server app. If I use numUsers: 50 it works perfectly for me, but when I set numUsers: 300 it gives me an error like TypeError: Object #<Client> has no method 'destroy'.
The NodeLoad config I used:
timeLimit: 10,
targetRps: 5,
numUsers: 300,
Error:
},reconnect=function(){var oldclient=client;if(oldclient){oldclient.destroy();
^
TypeError: Object #<Client> has no method 'destroy'
at reconnect (/root/nodeLoadDemo/nodeload/node_modules/nodeload/nodeload.js:9:506)
at Client.<anonymous> (/root/nodeLoadDemo/nodeload/node_modules/nodeload/nodeload.js:10:180)
at Client.EventEmitter.emit (events.js:117:20)
at ClientRequest.<anonymous> (http.js:2144:10)
at ClientRequest.EventEmitter.emit (events.js:95:17)
at Socket.socketOnEnd [as onend] (http.js:1568:9)
at Socket.g (events.js:180:16)
at Socket.EventEmitter.emit (events.js:117:20)
at _stream_readable.js:919:16
at process._tickCallback (node.js:419:13)
So, can anyone explain this error, and is there a solution for it? Thank you.
I got an answer. You just need to make a change inside node_modules: open the nodeload module's nodeload.js file and, at the end of line 9, replace
if (oldclient) { oldclient.destroy(); }
with
if (oldclient && oldclient.destroy) { oldclient.destroy(); }
The extra check simply skips the call for client objects that don't implement destroy(), which is what blows up under the higher user count.
