How to parse a Jolokia response into Elasticsearch using Logstash

I'm trying to create a Logstash pipeline that polls an ActiveMQ Jolokia endpoint. I want to collect all the metrics for the queues on the broker. I have the following pipeline:
input {
  http_poller {
    urls => {
      health_metrics => {
        method => "get"
        url => "http://localhost:8161/api/jolokia/read/org.apache.activemq:type=Broker,brokerName=localhost,destinationType=Queue,destinationName=*"
        headers => {
          "Content-Type" => "application/json"
        }
        auth => {
          user => "admin"
          password => "admin"
        }
      }
    }
    request_timeout => 30
    keepalive => false
    interval => 5
    codec => "json"
    type => "activemq_broker_queue"
  }
}
filter {
  json_encode {
    source => "value"
  }
  json {
    source => "value"
  }
  mutate {
    remove_field => ["request", "value", "timestamp"]
  }
}
output {
  elasticsearch {
    hosts => "localhost"
    # An index is created for each type of metrics input
    index => "logstash-activmq"
    document_type => "%{type}"
  }
  stdout {
    codec => rubydebug
  }
}
My Jolokia response is in this format:
{
request: {
mbean: "org.apache.activemq:brokerName=localhost,destinationName=*,destinationType=Queue,type=Broker",
type: "read"
},
value: {
"org.apache.activemq:brokerName=localhost,destinationName=SEARCH,destinationType=Queue,type=Broker": {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "SEARCH",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
},
"org.apache.activemq:brokerName=localhost,destinationName=weather,destinationType=Queue,type=Broker": {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "weather",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
}
},
timestamp: 1453588727,
status: 200
}
I would like to split the two queue destinations into two separate documents and then save them to ES.
Currently I'm getting an error about field names that cannot contain '.'.
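One approach (a minimal sketch, untested, and assuming the Logstash 5+ event API with event.get/event.set) is to drop the json_encode/json round trip and instead use a ruby filter to copy each entry of the value map into an array, then the split filter to emit one event per queue. The queues and mbean field names are placeholders I chose, not anything required by Logstash:
filter {
  ruby {
    code => '
      queues = []
      value = event.get("value")
      if value.is_a?(Hash)
        value.each do |mbean, attrs|
          next unless attrs.is_a?(Hash)
          # keep the MBean name as a value instead of a dotted field name
          queues << attrs.merge("mbean" => mbean)
        end
      end
      event.set("queues", queues)
    '
  }
  # one event per element of the queues array
  split {
    field => "queues"
  }
  mutate {
    remove_field => ["request", "value", "timestamp"]
  }
}
Because the dotted MBean names ("org.apache.activemq:...") end up as string values rather than field names, this should also avoid the Elasticsearch error about field names containing '.'.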

Related

MongoDB driver slow performance for node.js

I have an issue with a slow data fetch. I have the following query to fetch the data:
const query1 = this._ctx.signals.find({
  user_id: user._id,
  'spell.id': null,
  'metadata.0.spell_id': { $in: spellsIds }
}).hint({ user_id: 1, 'spell.id': 1, 'metadata.0.spell_id': 1 }).explain('allPlansExecution')
And the execution time according to explain is 35 ms. Here is the explain object:
{
queryPlanner: {
plannerVersion: 1,
namespace: 'gringotts.Signals',
indexFilterSet: false,
parsedQuery: { ... },
winningPlan: {
stage: 'FETCH',
inputStage: {
stage: 'IXSCAN',
keyPattern: {
user_id: 1,
'spell.id': 1
},
indexName: 'user_id_1_spell.id_1',
isMultiKey: false,
multiKeyPaths: {
user_id: [],
'spell.id': []
},
isUnique: false,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds: { ... }
}
},
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 23866,
executionTimeMillis: 35,
totalKeysExamined: 23869,
totalDocsExamined: 23866,
executionStages: {
stage: 'FETCH',
nReturned: 23866,
executionTimeMillisEstimate: 1,
works: 23869,
advanced: 23866,
needTime: 2,
needYield: 0,
saveState: 23,
restoreState: 23,
isEOF: 1,
docsExamined: 23866,
alreadyHasObj: 0,
inputStage: {
stage: 'IXSCAN',
nReturned: 23866,
executionTimeMillisEstimate: 1,
works: 23869,
advanced: 23866,
needTime: 2,
needYield: 0,
saveState: 23,
restoreState: 23,
isEOF: 1,
keyPattern: {
user_id: 1,
'spell.id': 1
},
indexName: 'user_id_1_spell.id_1',
isMultiKey: false,
multiKeyPaths: {
user_id: [],
'spell.id': []
},
isUnique: false,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds: { ... },
keysExamined: 23869,
seeks: 3,
dupsTested: 0,
dupsDropped: 0
}
},
allPlansExecution: []
},
serverInfo: {
host: 'ip-192-168-1-98.ec2.internal',
port: 27017,
version: '4.4.4',
gitVersion: '8db30a63db1a9d84bdcad0c83369623f708e0397'
},
ok: 1
}
When I try to fetch the data with the following piece of code, the execution time ranges from 750 ms to 900 ms (about 21x longer). The average document size is 544.6135124888154 bytes.
console.time('q1-time')
const q1 = await this._ctx.signals.find({
  user_id: user._id,
  'spell.id': {
    $in: spellsIds
  }
  // @ts-ignore
}).hint({
  user_id: 1,
  'spell.id': 1
})
const f = (q) => {
  const result = []
  return new Promise((res, rej) => {
    q.stream().on('end', function() {
      console.log('done processing stream')
      res(result)
    })
    q.stream().on('data', (d) => {
      result.push(d)
    })
  })
}
const data = await f(q1)
console.timeEnd('q1-time') // -> q1-time: 769.511ms
I tried different approaches: .toArray(), iteration via a cursor, and the one with streams (posted above), which is the fastest.
Why does it take so much longer to get the data? Can it be optimized somehow?
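executionTimeMillis only measures the work on the server; the ~24k documents (roughly 13 MB at ~545 bytes each) still have to be sent over the wire and deserialized by the driver, which is likely where most of the 750-900 ms goes. A hedged sketch (the projected field names are placeholders, keep only what you actually use) that reduces the transferred data with a projection and reduces round trips with a larger batch size:
const docs = await this._ctx.signals
  .find(
    { user_id: user._id, 'spell.id': { $in: spellsIds } },
    // projection: request only the fields the caller needs
    { projection: { 'spell.id': 1, 'metadata.0.spell_id': 1 } }
  )
  .batchSize(5000) // fewer network round trips for ~24k documents
  .toArray()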

Node.js XML to JSON

I am using fast-xml-parser for converting XML to JSON.
var parser = require('fast-xml-parser');
var he = require('he');
var fs = require("fs");
var util = require("util");

var options = {
  attributeNamePrefix: "#",
  attrNodeName: "attr",
  textNodeName: "#text",
  ignoreAttributes: true,
  ignoreNameSpace: true,
  allowBooleanAttributes: false,
  parseNodeValue: true,
  parseAttributeValue: false,
  trimValues: true,
  cdataTagName: "__cdata", // default is 'false'
  cdataPositionChar: "\\c",
  parseTrueNumberOnly: false,
  arrayMode: false, // "strict"
  attrValueProcessor: (val, attrName) => he.decode(val, {isAttributeValue: true}),
  tagValueProcessor: (val, tagName) => he.decode(val),
  stopNodes: ["parse-me-as-string"]
};

var tObj = parser.getTraversalObj(xml, options);
var jsonObj = parser.convertToJson(tObj, options);
var lastJson = util.inspect(jsonObj, false, null, true);
console.log(lastJson);
My output is below, but I don't want to see the root and element wrappers. How can I fix this?
{
root: {
BIREYSEL: {
bkBildirimdeBulunanFinansKurulusuSayisi: 2,
bkEnEskTakibeAlinmaTarihi: '',
bkExclusionCode: '',
bkGecikmedekiToplamHesapSayisi: 0,
bkGeciktirdigiBakiyeToplami: 0,
bkHesapDokumuBeans: {
bkHesap: {
element: [
{
bkAcilisTarihi: 20190716,
bkDovizKodu: 'TL',
bkEnSonGuncellemeTarihi: 20190915,
bkGecikmedekiBakiye: 0,
bkHesaptakiKonumu: 1,
bkKapanisTarihi: { element: '' },
bkKayitReferansNo: '7027461580078B',
bkKrediKartiTaksitliBakiye: 263,
bkKrediTuru: 23,
bkKrediTutariLimiti: 12250,
bkKurumRumuzu: 'A',
bkLimitKullanimOrani: 0.09,
bkOdemePerformansiTarihcesi: 0,
bkSiraNo: 1,
bkTakibeAlinmaBakiyesi: 0,
bkTakipTarihi: '',
bkToplamBakiye: 1026,
bkToplamGeciktirilmisOdemeSayisi: 0,
bkTuketiciKredisiTaksitSayisi: '',
bkTuketiciKredisiTaksitTutari: ''
},
{
bkAcilisTarihi: 20110914,
bkDovizKodu: 'TL',
bkEnSonGuncellemeTarihi: 20190913,
bkGecikmedekiBakiye: 0,
bkHesaptakiKonumu: 1,
bkKapanisTarihi: { element: '' },
bkKayitReferansNo: '5670737591250I',
bkKrediKartiTaksitliBakiye: 0,
bkKrediTuru: 23,
bkKrediTutariLimiti: 3500,
bkKurumRumuzu: 'B',
bkLimitKullanimOrani: 0,
bkOdemePerformansiTarihcesi: 0,
bkSiraNo: 2,
bkTakibeAlinmaBakiyesi: 0,
bkTakipTarihi: '',
bkToplamBakiye: 0,
bkToplamGeciktirilmisOdemeSayisi: { element: '' },
bkTuketiciKredisiTaksitSayisi: '',
bkTuketiciKredisiTaksitTutari: ''
}
]
}
},
bkKrediNotu: 1369,
bkKrediNotuSebepKodu1: 12,
bkKrediNotuSebepKodu2: 6,
bkKrediNotuSebepKodu3: 9,
bkKrediNotuSebepKodu4: 17,
bkMevcutEnUzunGecikmeSuresi: 0,
bkSonKrediKullandirimTarihi: 20190716,
bkSorguNo: 72626513,
bkTakibiDevamEdenKrediSayisi: 0,
bkTakipBildirimdeBulunanFinansKurulusuSayisi: 0,
bkToplamKrediliHesapSayisi: 2,
bkToplamLimit: 15750,
bkToplamRisk: 1026,
bkToplamTakibeAlinmisKrediSayisi: 0,
bkToplamTakipBakiyesi: 0,
bkWorstPaymetStatusEver: 0,
krsSonucu: 0,
krsVeriBulunduMu: true
},
TICARI: {
krmSonucu: 1,
krmVeriBulunduMu: false,
tkBildirimdeBulunanFinansKurulusuSayisi: 0,
tkEnEskiTakipTarihi: '',
tkEnGuncelLimitTahsisTarihi: '',
tkEnYakinTakipTarihi: '',
tkGecikmedekiHesapSayisi: 0,
tkGecikmisBakiyeToplami: 0,
tkGuncelKrediBakiyesiToplami: 0,
tkIlkKrediKullandirimTarihi: '',
tkLimitRiskBeans: '',
tkSonKrediKullandirimTarihi: '',
tkTakFaktoringBildirimDonemi: '',
tkTakFaktoringCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTakFaktoringFaizReeskontKomisyon: 0,
tkTakFaktoringFaizTahakkukuKomisyon: 0,
tkTakFaktoringKrediLimiti: 0,
tkTakFaktoring_12_24_Ay_Vadeli: 0,
tkTakFaktoring_1_12_Ay_Vadeli: 0,
tkTakFaktoring_24_Ay_Vadeli: 0,
tkTakLeasingBildirimDonemi: '',
tkTakLeasingCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTakLeasingFaizReeskontKomisyon: 0,
tkTakLeasingFaizTahakkukuKomisyon: 0,
tkTakLeasingKrediLimiti: 0,
tkTakLeasing_12_24_Ay_Vadeli: 0,
tkTakLeasing_1_12_Ay_Vadeli: 0,
tkTakLeasing_24_Ay_Vadeli: 0,
tkTakibeAlindigiTarihtekiRiskTutarlariToplami: 0,
tkTakipHesabiBildirimdeBulunanFinansKurulusuSayisi: 0,
tkTokFaktoringBildirimDonemi: '',
tkTokFaktoringCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTokFaktoringFaizReeskontKomisyon: 0,
tkTokFaktoringFaizTahakkukuKomisyon: 0,
tkTokFaktoringKrediLimiti: 0,
tkTokFaktoring_12_24_Ay_Vadeli: 0,
tkTokFaktoring_1_12_Ay_Vadeli: 0,
tkTokFaktoring_24_Ay_Vadeli: 0,
tkTokLeasingBildirimDonemi: '',
tkTokLeasingCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTokLeasingFaizReeskontKomisyon: 0,
tkTokLeasingFaizTahakkukuKomisyon: 0,
tkTokLeasingKrediLimiti: 0,
tkTokLeasing_12_24_Ay_Vadeli: 0,
tkTokLeasing_1_12_Ay_Vadeli: 0,
tkTokLeasing_24_Ay_Vadeli: 0,
tkToplamDigerLimit: '',
tkToplamDigerRisk: 0,
tkToplamGayriNakdiLimit: 0,
tkToplamGayriNakdiRisk: 0,
tkToplamLimit: 0,
tkToplamNakdiLimit: 0,
tkToplamNakdiRisk: 0,
tkToplamRisk: 0
},
donusDegerleri: { hataKodu: 0, islemSonucu: 0 },
ersRaporTuru: 'D',
raporTur: 'FD',
referansNo: '603A796EB0',
varlikTuru: 'G'
}
}
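The root key reflects the XML document's root element, and element presumably comes from repeated child tags, so one option (a sketch based only on the object shape printed above, not a parser setting) is to unwrap them after parsing:
// assumes jsonObj has the shape printed above
const body = jsonObj.root;                                          // drop the outer "root" wrapper
const hesaplar = body.BIREYSEL.bkHesapDokumuBeans.bkHesap.element;  // the array of bkHesap entries
console.log(util.inspect({ BIREYSEL: body.BIREYSEL, TICARI: body.TICARI }, false, null, true));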

MongoDB Node.js slow concurrent queries

When I run concurrent MongoDB queries using Node.js, the second query always takes ~2 seconds to return. Using explain(), executionTimeMillis always returns 0 ms, which is absolutely normal as my test collection has only 2 entries. Here's my reduced test case:
'use strict'
const { MongoClient } = require('mongodb')

const main = async () => {
  const client = new MongoClient('mongodb://admin:123456@localhost:27017/', {
    useNewUrlParser: true,
    useUnifiedTopology: true,
  })
  await client.connect()
  const db = client.db('test')
  const numbers = db.collection('numbers')
  const promises = []
  console.time()
  for (let i = 0; i < 3; i++) {
    promises.push(numbers.find({ number: i }).explain())
  }
  for (const promise of promises) {
    console.log(await promise)
    console.timeLog()
  }
  console.timeEnd()
  await client.close()
}
main()
Output:
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 32.252ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2042.929ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 0,
executionTimeMillis: 0,
totalKeysExamined: 0,
totalDocsExamined: 0,
executionStages: {
stage: 'FETCH',
nReturned: 0,
executionTimeMillisEstimate: 0,
works: 1,
advanced: 0,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 0,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2062.851ms
default: 2063.513ms
If I run the queries sequentially, each query takes only a few milliseconds to return. So why the 2-second response time?
Edit:
In the first for loop, I make/run the "concurrent" queries with promises.push(numbers.find({ number: i }).explain()). In the second for loop, I wait for the promises to resolve one after another, but that doesn't mean a promise must wait for the previous one to resolve before starting its work.
To avoid misunderstandings, I've made a few small changes to my code, replacing the two for loops with this:
for (let i = 0; i < 3; i++) {
  promises.push(
    numbers
      .find({ number: i })
      .explain()
      .then(result => {
        // console.log(result)
        console.log('query index:', i)
        console.timeLog()
      })
  )
}
await Promise.all(promises)
Output:
query index: 0
default: 22.040ms
query index: 2
default: 2032.921ms
query index: 1
default: 2034.682ms
default: 2035.260ms
Edit 2:
For further clarification, I use labels to denote timers.
for (let i = 0; i < 3; i++) {
  console.time(`query index: ${i}`)
  promises.push(
    numbers
      .find({ number: i })
      .explain()
      .then(result => {
        // console.log(result)
        console.timeEnd(`query index: ${i}`)
      })
  )
}
await Promise.all(promises)
Output:
query index: 0: 12.692ms
query index: 1: 2015.143ms
query index: 2: 2015.310ms
Set MongoClient's poolSize to 1.
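For reference, a sketch of where that option goes (assuming the 3.x driver, where poolSize is accepted as a MongoClient connection option):
const client = new MongoClient('mongodb://admin:123456@localhost:27017/', {
  useNewUrlParser: true,
  useUnifiedTopology: true,
  poolSize: 1, // size of the driver's connection pool
})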

MongoDB 4.0 bulkWrite is extremely slow while updating

I have a collection with more than 1 million users, and I'm trying to update users' balances on some event.
When I try to update e.g. 299 rows, it takes up to 15739.901ms.
There is no high load on the server; it's just Mongo running. I'm storing the database on an SSD (Samsung EVO 860), but MongoDB is installed on an HDD.
Here's my function:
async usersUpdate(usersToUpdate) {
  const updates = [];
  return new Promise(async (resolve, reject) => {
    usersToUpdate.forEach(user => {
      updates.push({
        "updateOne": {
          "filter": { "userID": user.userID, 'balance': user.userBalance },
          "update": { "$set": { "user.$.userBalance": user.newBalance }, "$addToSet": { 'orders.$.orderID': user.OrderID } }
        }
      });
    });
    console.log('total updates', updates.length);
    if (updates.length > 0) {
      const DbConnection = await getConnection();
      const usersTable = DbConnection.collection('usersCollection');
      usersTable.bulkWrite(updates, { "ordered": false, writeConcern: { w: 0 } }, function(err, result) {
        // do something with result
        if (err) return reject(err);
        return resolve(result);
      });
    } else {
      return resolve('Nothing to update');
    }
  });
}
Both userID and userBalance are indexed, and the write concern is set to { w: 0 }.
I don't know what's wrong with the code or why it's so slow.
What's the problem, and how could I speed up the process a bit?
MongoDB config file:
storage:
  dbPath: "/ssd/mongodb"
  journal:
    enabled: false
Explain result:
{ queryPlanner:
{ plannerVersion: 1,
namespace: 'usersDB.usersCollection',
indexFilterSet: false,
parsedQuery:
{ '$and':
[ { userID:
{ '$eq': 'Kfasg3ffasg' } },
{ 'user.userBalance': { '$eq': 10 } } ] },
winningPlan:
{ stage: 'FETCH',
filter: { 'user.userBalance': { '$eq': 10 } },
inputStage:
{ stage: 'IXSCAN',
keyPattern: { userID: 1 },
indexName: 'userID_1',
isMultiKey: false,
multiKeyPaths: { userID: [] },
isUnique: true,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds:
{ userID:
[ '["Kfasg3ffasg", "Kfasg3ffasg"]' ] } } },
rejectedPlans: [] },
executionStats:
{ executionSuccess: true,
nReturned: 1,
executionTimeMillis: 24,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages:
{ stage: 'FETCH',
filter: { 'user.userBalance': { '$eq': 10 } },
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage:
{ stage: 'IXSCAN',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
keyPattern: { userID: 1 },
indexName: 'userID_1',
isMultiKey: false,
multiKeyPaths: { userID: [] },
isUnique: true,
isSparse: false,
isPartial: false,
indexVersion: 2,
direction: 'forward',
indexBounds:
{ userID:
[ '["Kfasg3ffasg", "Kfasg3ffasg"]' ] },
keysExamined: 1,
seeks: 1,
dupsTested: 0,
dupsDropped: 0,
seenInvalidated: 0 } },
allPlansExecution: [] },
serverInfo:
{ }
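The winning plan uses only the userID_1 index and applies the balance condition as a FETCH filter. A hedged sketch of a compound index covering both filter fields (the field name is taken from the explain output; adjust it if your documents actually use balance, and with a unique userID the gain may be small):
// run once, e.g. at startup; getConnection() and the collection name come from the code above
const DbConnection = await getConnection();
await DbConnection.collection('usersCollection').createIndex({ userID: 1, 'user.userBalance': 1 });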

Data being modified during axios.get request

I am currently working on a React project, and the data sent from the backend appears to be modified by the time it arrives on the frontend. I'm probably missing something really obvious, but for the life of me I cannot figure out where it goes wrong.
The frontend component
import React from 'react';
//.. import bunch of other things

const styles = theme => ({
  root: {},
  sectionTitle: {
    margin: '20px 0 35px 0'
  },
  sectionTitleNoTop: {
    margin: '0 0 35px 0'
  },
  gap50: {
    margin: 50
  }
});

class RankingsPage extends React.Component {
  state = {
    tableData: null,
    tableDataEarnings: null,
    barData: null,
    barDataEarnings: null,
    tableError: ''
  };

  constructor(props) {
    super(props);
    this.getBarData = this.getBarData.bind(this);
  }

  componentWillMount() {
    this.getBarData(false); // first request
    this.getBarData(true);  // second request
  }

  async getBarData(earnings) {
    let response = await axios.get(`/api/ranking/ranking_barchart?earnings=${earnings}`);
    let {success, data, error} = response.data;
    console.log(earnings);
    console.log(data);
    // The first request works fine. Retrieves the following:
    // [ { fill: true,
    //     backgroundColor: 'rgba(58, 79, 212, 0.4)',
    //     data: [ 11, 8, 8, 5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] },
    //   { fill: true,
    //     backgroundColor: 'rgba(239, 49, 81, 0.4)',
    //     data: [ 0, 0, 0, 0, 10, 0, 15, 6, 10, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0 ] } ]
    // The second request gives this:
    // [ { fill: true,
    //     backgroundColor: 'rgba(58, 79, 212, 0.4)',
    //     data: [ 11, 8, 8, 5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] },
    //   { fill: true,
    //     backgroundColor: 'rgba(239, 49, 81, 0.4)',
    //     data: [ 11, 8, 8, 5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] } ]
    // Notice that this part is different from the first
    if (!success) {
      return this.setState({barDataError: error});
    }
    if (!earnings) {
      return this.setState({barData: data});
    }
    this.setState({barDataEarnings: data});
  }

  render() {
    const {classes} = this.props;
    const {tableData, tableDataEarnings, barData, barDataEarnings} = this.state;
    return (
      <div className={classes.root}>
        {/* I'm using this.state.barData and this.state.barDataEarnings here - I don't think this code has anything to do with the problem but if you need it I'll provide it */}
      </div>
    );
  }
}

export default withStyles(styles)(RankingsPage)
The component makes EXACTLY the same request except that the query value in the URL is different. However, the data of the second object in the array differs from that of the first response.
Now the more interesting thing is the backend.
app.get('/api/ranking/ranking_barchart', authorize, async (req, res) => {
  try {
    let earnings = req.query.earnings === 'true';
    // let sortBy = earnings ? {totalReceived: -1} : {totalDonated: -1};
    let sortBy = {totalReceived: -1};
    let topUsersResult =
      await User.find({})
        .sort(sortBy)
        .limit(20)
        .lean()
        .exec();
    let returnBody = {
      labels: [],
      datasets: []
    };
    let donationsMade = {
      fill: true,
      backgroundColor: 'rgba(239, 49, 81, 0.4)',
      data: []
    };
    let donationsReceived = {
      fill: true,
      backgroundColor: 'rgba(58, 79, 212, 0.4)',
      data: []
    };
    for (let i = 0; i < topUsersResult.length; i++) {
      let curResult = topUsersResult[i];
      returnBody.labels.push(curResult.username);
      donationsMade.data.push(
        curResult.totalDonated ? curResult.totalDonated : 0
      );
      donationsReceived.data.push(
        curResult.totalReceived ? curResult.totalReceived : 0
      );
    }
    returnBody.datasets.push(donationsReceived);
    returnBody.datasets.push(donationsMade);
    console.log(returnBody.datasets); // Print here is EXACTLY the same for both requests as expected
    res.json(API_MSG.successMsg(returnBody));
  } catch (e) {
    res.json(API_MSG.errorMsg(e, MSG_TYPES.FIND_ERROR, true));
  }
});
I have it set to NOT use the earnings query, so both requests produce the EXACT same response. The code above is almost unnecessary because the console.log value is exactly the same, as shown below. The console.log commented on in the code above prints the following:
//First Request
[ { fill: true,
backgroundColor: 'rgba(58, 79, 212, 0.4)',
data: [ 11, 8, 8, 5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] },
{ fill: true,
backgroundColor: 'rgba(239, 49, 81, 0.4)',
data: [ 0, 0, 0, 0, 10, 0, 15, 6, 10, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0 ] } ]
//Second Request
[ { fill: true,
backgroundColor: 'rgba(58, 79, 212, 0.4)',
data: [ 11, 8, 8, 5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] },
{ fill: true,
backgroundColor: 'rgba(239, 49, 81, 0.4)',
data: [ 0, 0, 0, 0, 10, 0, 15, 6, 10, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0 ] } ]
PLEASE tell me I'm not crazy. How is the EXACT same response returned from the server somehow being modified while it's in transit to the frontend???
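You're probably not crazy. One common explanation (an assumption to rule out, not a diagnosis) is that the browser console logs a live reference to the array, so if something mutates data after the request completes (a chart library, for example), the expanded log shows the mutated version even though the response itself was identical. Logging an immutable snapshot makes that visible:
// log a deep copy so later mutation of `data` cannot change what the console shows
console.log(earnings, JSON.parse(JSON.stringify(data)));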
