MongoDB 4.0 bulkWrite is extremely slow while updating - Node.js

I have a collection with more than 1 million users, and I'm trying to update users' balances on certain events.
When I update e.g. 299 rows, it takes up to 15739.901ms.
There is no high load on the server; it's just MongoDB running. The database files are stored on a Samsung EVO 860 SSD, but MongoDB itself is installed on an HDD.
Here's my function:
async usersUpdate(usersToUpdate){
  const updates = [];
  return new Promise(async (resolve, reject) => {
    usersToUpdate.forEach(user => {
      updates.push({
        "updateOne": {
          "filter": { "userID": user.userID, "balance": user.userBalance },
          "update": {
            "$set": { "user.$.userBalance": user.newBalance },
            "$addToSet": { "orders.$.orderID": user.OrderID }
          }
        }
      });
    });
    console.log('total updates', updates.length);
    if (updates.length > 0) {
      const DbConnection = await getConnection();
      const usersTable = DbConnection.collection('usersCollection');
      usersTable.bulkWrite(updates, { "ordered": false, writeConcern: { w: 0 } }, function(err, result) {
        // do something with result
        if (err) return reject(err);
        return resolve(result);
      });
    } else {
      return resolve('Nothing to update');
    }
  });
}
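As an aside, the explicit Promise wrapper isn't needed: bulkWrite already returns a promise when called without a callback, so the same function can be sketched without it (behavior unchanged, same assumption about getConnection):

async usersUpdate(usersToUpdate) {
  const updates = usersToUpdate.map(user => ({
    updateOne: {
      filter: { userID: user.userID, balance: user.userBalance },
      update: {
        $set: { "user.$.userBalance": user.newBalance },
        $addToSet: { "orders.$.orderID": user.OrderID }
      }
    }
  }));
  console.log('total updates', updates.length);
  if (updates.length === 0) return 'Nothing to update';
  const DbConnection = await getConnection();
  // bulkWrite returns a promise when no callback is passed
  return DbConnection.collection('usersCollection')
    .bulkWrite(updates, { ordered: false, writeConcern: { w: 0 } });
}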
Both userID and userBalance are indexed, and I set the write concern to w: 0.
I don't know what's wrong with the code or why it's this slow.
What's the problem, and how can I speed the process up a bit?
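For reference, the indexes mentioned would look roughly like this (a sketch; only the unique index on userID is confirmed by the explain output below, and the exact shape of the balance index is an assumption):

// confirmed by the explain output (indexName: 'userID_1', isUnique: true)
db.usersCollection.createIndex({ userID: 1 }, { unique: true });
// assumed shape for the balance index mentioned above
db.usersCollection.createIndex({ "user.userBalance": 1 });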
MongoDB config file:
storage:
  dbPath: "/ssd/mongodb"
  journal:
    enabled: false
Explain result:
{ queryPlanner:
{ plannerVersion: 1,
namespace: 'usersDB.usersCollection',
indexFilterSet: false,
parsedQuery:
{ '$and':
[ { userID:
{ '$eq': 'Kfasg3ffasg' } },
{ 'user.userBalance': { '$eq': 10 } } ] },
winningPlan:
{ stage: 'FETCH',
filter: { 'user.userBalance': { '$eq': 10 } },
inputStage:
{ stage: 'IXSCAN',
keyPattern: { userID: 1 },
indexName: 'userID_1',
isMultiKey: false,
multiKeyPaths: { userID: [] },
isUnique: true,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds:
{ userID:
[ '["Kfasg3ffasg", "Kfasg3ffasg"]' ] } } },
rejectedPlans: [] },
executionStats:
{ executionSuccess: true,
nReturned: 1,
executionTimeMillis: 24,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages:
{ stage: 'FETCH',
filter: { 'user.userBalance': { '$eq': 10 } },
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage:
{ stage: 'IXSCAN',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
keyPattern: { userID: 1 },
indexName: 'userID_1',
isMultiKey: false,
multiKeyPaths: { userID: [] },
isUnique: true,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds:
{ userID:
[ '["Kfasg3ffasg", "Kfasg3ffasg"]' ] },
keysExamined: 1,
seeks: 1,
dupsTested: 0,
dupsDropped: 0,
seenInvalidated: 0 } },
allPlansExecution: [] },
serverInfo:
{ } }

Related

MongoDB driver slow performance for Node.js

I have an issue with slow data fetching. I have the following query to fetch the data:
const query1 = this._ctx.signals.find({
  user_id: user._id,
  'spell.id': null,
  'metadata.0.spell_id': { $in: spellsIds }
}).hint({ user_id: 1, 'spell.id': 1, 'metadata.0.spell_id': 1 }).explain('allPlansExecution')
And the execution time according to explain is 35ms. Here is the explain object:
{
queryPlanner: {
plannerVersion: 1,
namespace: 'gringotts.Signals',
indexFilterSet: false,
parsedQuery: { ... },
winningPlan: {
stage: 'FETCH',
inputStage: {
stage: 'IXSCAN',
keyPattern: {
user_id: 1,
'spell.id': 1
},
indexName: 'user_id_1_spell.id_1',
isMultiKey: false,
multiKeyPaths: {
user_id: [],
'spell.id': []
},
isUnique: false,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds: { ... }
}
},
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 23866,
executionTimeMillis: 35,
totalKeysExamined: 23869,
totalDocsExamined: 23866,
executionStages: {
stage: 'FETCH',
nReturned: 23866,
executionTimeMillisEstimate: 1,
works: 23869,
advanced: 23866,
needTime: 2,
needYield: 0,
saveState: 23,
restoreState: 23,
isEOF: 1,
docsExamined: 23866,
alreadyHasObj: 0,
inputStage: {
stage: 'IXSCAN',
nReturned: 23866,
executionTimeMillisEstimate: 1,
works: 23869,
advanced: 23866,
needTime: 2,
needYield: 0,
saveState: 23,
restoreState: 23,
isEOF: 1,
keyPattern: {
user_id: 1,
'spell.id': 1
},
indexName: 'user_id_1_spell.id_1',
isMultiKey: false,
multiKeyPaths: {
user_id: [],
'spell.id': []
},
isUnique: false,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds: { ... },
keysExamined: 23869,
seeks: 3,
dupsTested: 0,
dupsDropped: 0
}
},
allPlansExecution: []
},
serverInfo: {
host: 'ip-192-168-1-98.ec2.internal',
port: 27017,
version: '4.4.4',
gitVersion: '8db30a63db1a9d84bdcad0c83369623f708e0397'
},
ok: 1
}
When I try to fetch the data with the following piece of code, the execution time ranges from 750ms to 900ms (21x slower). The average document size is 544.6135124888154 bytes.
console.time('q1-time')
const q1 = await this._ctx.signals.find({
  user_id: user._id,
  'spell.id': {
    $in: spellsIds
  }
  // @ts-ignore
}).hint({
  user_id: 1,
  'spell.id': 1
})
const f = (q) => {
  const result = []
  return new Promise((res, rej) => {
    const stream = q.stream()
    stream.on('data', (d) => {
      result.push(d)
    })
    stream.on('end', function() {
      console.log('done processing stream')
      res(result)
    })
  })
}
const data = await f(q1)
console.timeEnd('q1-time') // -> q1-time: 769.511ms
I tried different approaches: .toArray(), iterating via the cursor, and the stream-based one I posted above, which is the fastest.
Why does it take so much longer to actually get the data? Can it be optimized somehow?
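Worth noting: explain()'s 35ms measures server-side execution only, while the 750-900ms wall-clock time also covers streaming ~24k documents to the client and deserializing the BSON. One knob that might help is a larger cursor batch size, which reduces getMore round trips; a sketch, assuming the driver accepts batchSize in the find options:

console.time('q1-batched')
const docs = await this._ctx.signals.find({
  user_id: user._id,
  'spell.id': { $in: spellsIds }
}, {
  batchSize: 10000 // assumption: larger batches mean fewer round trips
}).hint({ user_id: 1, 'spell.id': 1 }).toArray()
console.timeEnd('q1-batched')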

MongoDB Node.js slow concurrent queries

When I run concurrent MongoDB queries using Node.js, the second query always takes ~2 seconds to return. Using explain(), executionTimeMillis always returns 0ms, which is absolutely normal as my test collection has only 2 entries. Here's my reduced test case:
'use strict'
const { MongoClient } = require('mongodb')
const main = async () => {
  const client = new MongoClient('mongodb://admin:123456@localhost:27017/', {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  })
  await client.connect()
  const db = client.db('test')
  const numbers = db.collection('numbers')
  const promises = []
  console.time()
  for (let i = 0; i < 3; i++) {
    promises.push(numbers.find({ number: i }).explain())
  }
  for (const promise of promises) {
    console.log(await promise)
    console.timeLog()
  }
  console.timeEnd()
  await client.close()
}
main()
Output:
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 32.252ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2042.929ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 0,
executionTimeMillis: 0,
totalKeysExamined: 0,
totalDocsExamined: 0,
executionStages: {
stage: 'FETCH',
nReturned: 0,
executionTimeMillisEstimate: 0,
works: 1,
advanced: 0,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 0,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2062.851ms
default: 2063.513ms
If I run the queries sequentially, each one takes only a few milliseconds to return. So why the 2-second response time?
Edit:
In the first for loop, I create and start the "concurrent" queries with promises.push(numbers.find({ number: i }).explain()). In the second for loop, I wait for the promises to resolve one after another, but that doesn't mean a promise must wait for the previous one to resolve before starting its work.
To avoid misunderstandings, I've made a small change to my code, replacing the two for loops with this:
for (let i = 0; i < 3; i++) {
  promises.push(
    numbers
      .find({ number: i })
      .explain()
      .then(result => {
        // console.log(result)
        console.log('query index:', i)
        console.timeLog()
      })
  )
}
await Promise.all(promises)
Output:
query index: 0
default: 22.040ms
query index: 2
default: 2032.921ms
query index: 1
default: 2034.682ms
default: 2035.260ms
Edit 2:
For further clarification, I use labels to tell the timers apart.
for (let i = 0; i < 3; i++) {
  console.time(`query index: ${i}`)
  promises.push(
    numbers
      .find({ number: i })
      .explain()
      .then(result => {
        // console.log(result)
        console.timeEnd(`query index: ${i}`)
      })
  )
}
await Promise.all(promises)
Output:
query index: 0: 12.692ms
query index: 1: 2015.143ms
query index: 2: 2015.310ms
Set MongoClient's poolSize to 1.
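With the 3.x driver that looks roughly like this (poolSize is the legacy option name; driver 4.x+ calls it maxPoolSize):

const client = new MongoClient('mongodb://admin:123456@localhost:27017/', {
  useNewUrlParser: true,
  useUnifiedTopology: true,
  poolSize: 1, // maxPoolSize in driver 4.x+
})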

Response rows: [ anonymous { today: 2017-08-14T00:08:52.643Z } ] - Node version v8.2.1

I'm working with node-postgres and trying to follow the first example.
I created a class called Postgres with a today() method:
const conn = new pg.Pool({
  user: 'user',
  host: 'localhost',
  database: 'test',
  password: 'pass',
  port: 5432,
});

class Postgres {
  today() {
    conn.query("SELECT NOW() as today", (err, res) => {
      if (err) throw err
      //console.log(JSON.stringify(res.rows[0]));
      return JSON.stringify(res.rows[0]);
      conn.end();
    })
  }
}
The result contains rows: [ anonymous { today: 2017-08-14T00:08:52.643Z } ]. I tried JSON.stringify(res.rows[0]), but it's not working; I can't get the field.
Result {
command: 'SELECT',
rowCount: 1,
oid: NaN,
rows: [ anonymous { today: 2017-08-14T00:08:52.643Z } ],
fields:
[ Field {
name: 'today',
tableID: 0,
columnID: 0,
dataTypeID: 1184,
dataTypeSize: 8,
dataTypeModifier: -1,
format: 'text' } ],
_parsers: [ [Function: parseDate] ],
RowCtor: [Function: anonymous],
rowAsArray: false,
_getTypeParser: [Function: bound ] }
Any suggestion would be appreciated.
Try res.rows[0].today; this will work!
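Note also that the return inside the query callback doesn't return anything from today() itself, and the conn.end() after it is unreachable. A minimal promise-based sketch of the same method (recent node-postgres versions return a promise from query() when no callback is passed):

class Postgres {
  async today() {
    // pool.query() returns a promise when no callback is given
    const res = await conn.query('SELECT NOW() as today');
    return res.rows[0].today; // a Date instance for the "today" column
  }
}

new Postgres().today().then(today => console.log(today));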

How to parse jolokia response into Elasticsearch using logstash

I'm trying to create a Logstash pipeline that polls an ActiveMQ Jolokia endpoint. I want to collect all the metrics for the queues on the broker. I have the following pipeline:
input {
  http_poller {
    urls => {
      health_metrics => {
        method => "get"
        url => "http://localhost:8161/api/jolokia/read/org.apache.activemq:type=Broker,brokerName=localhost,destinationType=Queue,destinationName=*"
        headers => {
          "Content-Type" => "application/json"
        }
        auth => {
          user => "admin"
          password => "admin"
        }
      }
    }
    request_timeout => 30
    keepalive => false
    interval => 5
    codec => "json"
    type => "activemq_broker_queue"
  }
}
filter {
  json_encode {
    source => "value"
  }
  json {
    source => "value"
  }
  mutate {
    remove_field => ["request", "value", "timestamp"]
  }
}
output {
  elasticsearch {
    hosts => "localhost"
    # An index is created for each type of metrics input
    index => "logstash-activmq"
    document_type => "%{type}"
  }
  stdout {
    codec => rubydebug
  }
}
My Jolokia response is in this format:
{
request: {
mbean: "org.apache.activemq:brokerName=localhost,destinationName=*,destinationType=Queue,type=Broker",
type: "read"
},
value: {
"org.apache.activemq:brokerName=localhost,destinationName=SEARCH,destinationType=Queue,type=Broker": {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "SEARCH",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
},
"org.apache.activemq:brokerName=localhost,destinationName=weather,destinationType=Queue,type=Broker": {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "weather",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
}
},
timestamp: 1453588727,
status: 200
}
I would like to be able to split the two queue destinations into two different documents and then save them to ES.
Currently I'm getting an error saying field names cannot contain '.'.
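The '.' error is the Elasticsearch 2.x restriction on dots in field names; after the json filter, the event's top-level keys are full mbean names, which contain dots. A ruby-filter sketch that renames such keys (assuming the Logstash 5.x event API; there is also a dedicated de_dot filter plugin for this):

filter {
  ruby {
    code => "
      # replace dots in top-level field names, which ES 2.x rejects
      event.to_hash.keys.each do |k|
        if k.include?('.')
          event.set(k.gsub('.', '_'), event.get(k))
          event.remove(k)
        end
      end
    "
  }
}

Splitting the two per-queue sub-objects into separate documents would still need a further step (e.g. the split filter or more ruby code); this sketch only addresses the dot error.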

NodeJs-MySQL object property undefined

I don't know why I can't access my property.
connection.query("call VerifyAccountToken(0, null)", function(err, rows, fields) {
if(err) console.log("Error: " + err);
console.log("SQLRet: ", rows[0].result);
console.log(rows);
console.log(fields);
});
VerifyAccountToken returns a single row/column result with the column named 'result'.
The console outputs the following:
SQLRet: undefined
[ [ { result: 0 } ],
{ fieldCount: 0,
affectedRows: 0,
insertId: 0,
serverStatus: 2,
warningCount: 1,
message: '',
protocol41: true,
changedRows: 0 } ]
[ [ { catalog: 'def',
db: '',
table: '',
orgTable: '',
name: 'result',
orgName: 'iRes',
filler1: ,
charsetNr: 63,
length: 11,
type: 3,
flags: 0,
decimals: 0,
filler2: ,
default: undefined,
zeroFill: false,
protocol41: true } ],
undefined ]
Everything I know tells me this should work.
OK, apparently I needed to use
rows[0][0].result;
I'm not sure why node-mysql nests stored procedures' returned results. (The reason: a CALL can produce multiple result sets plus a final status packet, so the driver returns an array of result sets; the second element in the output above is that status packet.)
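A minimal sketch of reading the value with that in mind (procedure name and column name taken from the question):

connection.query("call VerifyAccountToken(0, null)", function(err, results) {
  if (err) throw err;
  // for a CALL, results is [rowsOfTheSelect, okPacket]
  var rows = results[0];
  console.log(rows[0].result); // -> 0
});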
