Nim Table is not updated - nim-lang

I'm pretty new to Nim.
I have this code:
# NOTE(review): this snippet lost its indentation when pasted; Nim is
# indentation-sensitive, so it will not compile exactly as shown.
type
# Variable environment: maps names to runtime values. BaseType, Error
# and Token are declared elsewhere in the project -- not visible here.
Environment* = object
values*: Table[string, BaseType]
error*: Error
# Build a fresh Environment with an empty values table.
proc newEnv*(errorObj: Error): Environment =
return Environment(
values: initTable[string, BaseType](),
error: errorObj
)
# Bind `name` to `value`; the echo is debug output showing the table
# right after insertion.
proc define*(env: var Environment, name: string, value: BaseType) =
env.values[name] = value
echo(env.values)
# Look up a variable by token. The echo is debug output; in the bug
# described below it prints an empty table, consistent with define
# having mutated a COPY of this Environment (object = value semantics).
proc get*(env: var Environment, name: Token): BaseType =
echo(env.values)
if env.values.hasKey(name.value):
return env.values[name.value]
else:
# do something...
# Interpreter owns its Environment BY VALUE (`object`, not `ref object`),
# so every copy of an Interpreter carries its own independent env -- the
# likely cause of the "table is empty" symptom reported below.
type Interpreter* = object
error*: Error
env*: Environment
# Construct an Interpreter with a fresh environment sharing the error object.
proc newInterpreter*(errorObj: Error): Interpreter =
return Interpreter(
error: errorObj,
env: newEnv(errorObj)
)
# Base methods for dynamic dispatch over the Expr/Stmt hierarchies.
method eval*(self: var Interpreter, expre: Expr): BaseType {.base.} = discard
method eval*(self: var Interpreter, statement: Stmt) {.base.} = discard
# Variable read: delegates the lookup to Environment.get.
method eval*(self: var Interpreter, expre: VariableExpr): BaseType =
return self.env.get(expre.name)
# Variable declaration: evaluate the initializer (if any), then bind it.
method eval*(self: var Interpreter, statement: VariableStmt) =
var value: BaseType
if not statement.init.isNil: value = self.eval(statement.init)
self.env.define(statement.name.value, value)
As you can see, when eval proc with VariableStmt is called, it calls define proc to bind the name with the value.
However, the two echo statements print this:
{"a": 123}
{:}
The first line is from define proc, and the second line is from get proc.
Even though it looks like "a" and 123 got saved from calling define proc, inside get proc, the values table is empty.
I am not sure why this is. Could you help me solve this?
I will provide more information if you need it.
Update
I put an echo statement just before return self.env.get(expre.name), and it prints out this:
(data: #[(hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil), (hcode: 0, key: "", val: nil)], 
counter: 0)
I'm not sure what this means. Is it a bug in Nim?

I tried to boil this down to a minimal working sample (always a good strategy when trying to track down a bug or get help). However this works:
import tables

type
  ## Minimal reproduction: a value-type Environment owned by a value-type
  ## Interpreter. (Indentation restored -- the pasted snippet was flattened
  ## and would not compile in indentation-sensitive Nim.)
  Environment = object
    table: Table[string, string]
  Interpreter = object
    env: Environment

proc define(env: var Environment, name, key: string) =
  ## Bind `name` to `key`; echoes the table for debugging.
  env.table[name] = key
  echo env.table

proc get(env: var Environment, name: string): string =
  ## Look up `name`; returns "" when the key is absent.
  echo env.table
  env.table.getOrDefault(name, "")

proc eval(self: var Interpreter, command, name: string, value = "") =
  ## Dispatch on `command`: "ADD" defines a binding, "GET" prints it.
  if command == "ADD":
    self.env.define(name, value)
  elif command == "GET":
    echo self.env.get(name)

var interpreter: Interpreter
interpreter.eval("ADD", "hello", "world")
interpreter.eval("GET", "hello")
This leads me to believe that you somewhere manage to make a copy of your interpreter or the environment as you pass it around. An easy mistake to make, but hard to tell where you made it or if it even is that without a more complete code sample (preferably something which runs of course).
EDIT: By the way, you probably don't need methods. You might, but it's quite a common beginner mistake to use them when you don't.

Related

Nodejs Xml to Json

I am using fast-xml-parser for converting XML to JSON
// Convert an XML document to JSON with fast-xml-parser; `he` decodes
// HTML entities found in tag and attribute values.
var parser = require('fast-xml-parser');
var he = require('he');
var fs = require("fs");
var util = require("util");
// Parser options -- see the fast-xml-parser docs for each default.
var options = {
attributeNamePrefix : "#",
attrNodeName: "attr",
textNodeName : "#text",
ignoreAttributes : true,
ignoreNameSpace : true,
allowBooleanAttributes : false,
parseNodeValue : true,
parseAttributeValue : false,
trimValues: true,
cdataTagName: "__cdata", //default is 'false'
cdataPositionChar: "\\c",
parseTrueNumberOnly: false,
arrayMode: false, //"strict"
attrValueProcessor: (val, attrName) => he.decode(val, {isAttributeValue: true}),
tagValueProcessor : (val, tagName) => he.decode(val),
stopNodes: ["parse-me-as-string"]
};
// NOTE(review): `xml` is not defined in this snippet -- presumably read
// elsewhere (e.g. via fs.readFileSync); confirm with the full program.
var tObj = parser.getTraversalObj(xml,options);
var jsonObj = (parser.convertToJson(tObj,options));
// util.inspect with depth=null prints the full nested structure.
var lastJson = util.inspect(jsonObj, false, null, true);
console.log(lastJson);
This is my output, but I don't want to see the root and element wrappers. How can I fix this?
{
root: {
BIREYSEL: {
bkBildirimdeBulunanFinansKurulusuSayisi: 2,
bkEnEskTakibeAlinmaTarihi: '',
bkExclusionCode: '',
bkGecikmedekiToplamHesapSayisi: 0,
bkGeciktirdigiBakiyeToplami: 0,
bkHesapDokumuBeans: {
bkHesap: {
element: [
{
bkAcilisTarihi: 20190716,
bkDovizKodu: 'TL',
bkEnSonGuncellemeTarihi: 20190915,
bkGecikmedekiBakiye: 0,
bkHesaptakiKonumu: 1,
bkKapanisTarihi: { element: '' },
bkKayitReferansNo: '7027461580078B',
bkKrediKartiTaksitliBakiye: 263,
bkKrediTuru: 23,
bkKrediTutariLimiti: 12250,
bkKurumRumuzu: 'A',
bkLimitKullanimOrani: 0.09,
bkOdemePerformansiTarihcesi: 0,
bkSiraNo: 1,
bkTakibeAlinmaBakiyesi: 0,
bkTakipTarihi: '',
bkToplamBakiye: 1026,
bkToplamGeciktirilmisOdemeSayisi: 0,
bkTuketiciKredisiTaksitSayisi: '',
bkTuketiciKredisiTaksitTutari: ''
},
{
bkAcilisTarihi: 20110914,
bkDovizKodu: 'TL',
bkEnSonGuncellemeTarihi: 20190913,
bkGecikmedekiBakiye: 0,
bkHesaptakiKonumu: 1,
bkKapanisTarihi: { element: '' },
bkKayitReferansNo: '5670737591250I',
bkKrediKartiTaksitliBakiye: 0,
bkKrediTuru: 23,
bkKrediTutariLimiti: 3500,
bkKurumRumuzu: 'B',
bkLimitKullanimOrani: 0,
bkOdemePerformansiTarihcesi: 0,
bkSiraNo: 2,
bkTakibeAlinmaBakiyesi: 0,
bkTakipTarihi: '',
bkToplamBakiye: 0,
bkToplamGeciktirilmisOdemeSayisi: { element: '' },
bkTuketiciKredisiTaksitSayisi: '',
bkTuketiciKredisiTaksitTutari: ''
}
]
}
},
bkKrediNotu: 1369,
bkKrediNotuSebepKodu1: 12,
bkKrediNotuSebepKodu2: 6,
bkKrediNotuSebepKodu3: 9,
bkKrediNotuSebepKodu4: 17,
bkMevcutEnUzunGecikmeSuresi: 0,
bkSonKrediKullandirimTarihi: 20190716,
bkSorguNo: 72626513,
bkTakibiDevamEdenKrediSayisi: 0,
bkTakipBildirimdeBulunanFinansKurulusuSayisi: 0,
bkToplamKrediliHesapSayisi: 2,
bkToplamLimit: 15750,
bkToplamRisk: 1026,
bkToplamTakibeAlinmisKrediSayisi: 0,
bkToplamTakipBakiyesi: 0,
bkWorstPaymetStatusEver: 0,
krsSonucu: 0,
krsVeriBulunduMu: true
},
TICARI: {
krmSonucu: 1,
krmVeriBulunduMu: false,
tkBildirimdeBulunanFinansKurulusuSayisi: 0,
tkEnEskiTakipTarihi: '',
tkEnGuncelLimitTahsisTarihi: '',
tkEnYakinTakipTarihi: '',
tkGecikmedekiHesapSayisi: 0,
tkGecikmisBakiyeToplami: 0,
tkGuncelKrediBakiyesiToplami: 0,
tkIlkKrediKullandirimTarihi: '',
tkLimitRiskBeans: '',
tkSonKrediKullandirimTarihi: '',
tkTakFaktoringBildirimDonemi: '',
tkTakFaktoringCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTakFaktoringFaizReeskontKomisyon: 0,
tkTakFaktoringFaizTahakkukuKomisyon: 0,
tkTakFaktoringKrediLimiti: 0,
tkTakFaktoring_12_24_Ay_Vadeli: 0,
tkTakFaktoring_1_12_Ay_Vadeli: 0,
tkTakFaktoring_24_Ay_Vadeli: 0,
tkTakLeasingBildirimDonemi: '',
tkTakLeasingCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTakLeasingFaizReeskontKomisyon: 0,
tkTakLeasingFaizTahakkukuKomisyon: 0,
tkTakLeasingKrediLimiti: 0,
tkTakLeasing_12_24_Ay_Vadeli: 0,
tkTakLeasing_1_12_Ay_Vadeli: 0,
tkTakLeasing_24_Ay_Vadeli: 0,
tkTakibeAlindigiTarihtekiRiskTutarlariToplami: 0,
tkTakipHesabiBildirimdeBulunanFinansKurulusuSayisi: 0,
tkTokFaktoringBildirimDonemi: '',
tkTokFaktoringCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTokFaktoringFaizReeskontKomisyon: 0,
tkTokFaktoringFaizTahakkukuKomisyon: 0,
tkTokFaktoringKrediLimiti: 0,
tkTokFaktoring_12_24_Ay_Vadeli: 0,
tkTokFaktoring_1_12_Ay_Vadeli: 0,
tkTokFaktoring_24_Ay_Vadeli: 0,
tkTokLeasingBildirimDonemi: '',
tkTokLeasingCalistigiBankVeDigerMaliKurulusSayisi: 0,
tkTokLeasingFaizReeskontKomisyon: 0,
tkTokLeasingFaizTahakkukuKomisyon: 0,
tkTokLeasingKrediLimiti: 0,
tkTokLeasing_12_24_Ay_Vadeli: 0,
tkTokLeasing_1_12_Ay_Vadeli: 0,
tkTokLeasing_24_Ay_Vadeli: 0,
tkToplamDigerLimit: '',
tkToplamDigerRisk: 0,
tkToplamGayriNakdiLimit: 0,
tkToplamGayriNakdiRisk: 0,
tkToplamLimit: 0,
tkToplamNakdiLimit: 0,
tkToplamNakdiRisk: 0,
tkToplamRisk: 0
},
donusDegerleri: { hataKodu: 0, islemSonucu: 0 },
ersRaporTuru: 'D',
raporTur: 'FD',
referansNo: '603A796EB0',
varlikTuru: 'G'
}
}

MongoDb Node.js slow concurrent queries

When I run concurrent MongoDb queries using Node.js, the second query always takes ~2 seconds to return. Using explain(), executionTimeMillis always returns 0ms, which is absolutely normal as my test collection has only 2 entries. Here's my reduced testcase:
'use strict'
const { MongoClient } = require('mongodb')
// Reduced test case: fire three find().explain() queries concurrently,
// then time how long each takes to resolve.
const main = async () => {
// NOTE(review): the '#' in the URI looks like it should be '@'
// separating credentials from host -- confirm the connection string.
const client = new MongoClient('mongodb://admin:123456#localhost:27017/', {
useNewUrlParser: true,
useUnifiedTopology: true,
})
await client.connect()
const db = client.db('test')
const numbers = db.collection('numbers')
const promises = []
console.time()
// Start all three queries without awaiting, so they run concurrently.
for (let i = 0; i < 3; i++) {
promises.push(numbers.find({ number: i }).explain())
}
// Await them in order, logging elapsed time after each one resolves.
for (const promise of promises) {
console.log(await promise)
console.timeLog()
}
console.timeEnd()
await client.close()
}
main()
Output:
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 32.252ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 1,
executionTimeMillis: 0,
totalKeysExamined: 1,
totalDocsExamined: 1,
executionStages: {
stage: 'FETCH',
nReturned: 1,
executionTimeMillisEstimate: 0,
works: 2,
advanced: 1,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 1,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2042.929ms
{
queryPlanner: {
plannerVersion: 1,
namespace: 'test.numbers',
indexFilterSet: false,
parsedQuery: { number: [Object] },
winningPlan: { stage: 'FETCH', inputStage: [Object] },
rejectedPlans: []
},
executionStats: {
executionSuccess: true,
nReturned: 0,
executionTimeMillis: 0,
totalKeysExamined: 0,
totalDocsExamined: 0,
executionStages: {
stage: 'FETCH',
nReturned: 0,
executionTimeMillisEstimate: 0,
works: 1,
advanced: 0,
needTime: 0,
needYield: 0,
saveState: 0,
restoreState: 0,
isEOF: 1,
invalidates: 0,
docsExamined: 0,
alreadyHasObj: 0,
inputStage: [Object]
},
allPlansExecution: []
},
serverInfo: {
host: 'DESKTOP-C7CAL9N',
port: 27017,
version: '4.0.10',
gitVersion: 'c389e7f69f637f7a1ac3cc9fae843b635f20b766'
},
ok: 1
}
default: 2062.851ms
default: 2063.513ms
If I run the queries sequentially, each query takes only a few milliseconds to return. So why the 2-second response time?
Edit:
In the first for loop, I made/ran "concurrent" queries promises.push(numbers.find({ number: i }).explain()). In the second for loop, I wait for promises to resolve one after another but that doesn't mean that a promise must wait till the previous one resolved to begin its job.
To avoid misunderstandings, I've made a little changes to my code, replacing the two for loops with this:
// Same three concurrent queries, but the timing log runs inside each
// .then callback so the actual completion order is visible.
for (let i = 0; i < 3; i++) {
promises.push(
numbers
.find({ number: i })
.explain()
.then(result => {
// console.log(result)
console.log('query index:', i)
console.timeLog()
})
)
}
await Promise.all(promises)
Output:
query index: 0
default: 22.040ms
query index: 2
default: 2032.921ms
query index: 1
default: 2034.682ms
default: 2035.260ms
Edit 2:
For further clarification, I use labels to denote timers.
// Per-query labelled timers: each query gets its own console.time label,
// so the individual latencies are unambiguous.
for (let i = 0; i < 3; i++) {
console.time(`query index: ${ i }`)
promises.push(
numbers
.find({ number: i })
.explain()
.then(result => {
// console.log(result)
console.timeEnd(`query index: ${ i }`)
})
)
}
await Promise.all(promises)
Output:
query index: 0: 12.692ms
query index: 1: 2015.143ms
query index: 2: 2015.310ms
Set MongoClient's poolSize to 1.

Chartjs export chart without html

I'm trying to export a chart created with Chart.js without an actual site — it's just a Node backend app that creates the chart; I then need to export it and send it to the Slack API.
I figured I would try to create a virtual DOM and then export from there, but it is not working. I'm open to other approaches or fixes to this code. I'm getting an error that says window is undefined, but if I console.log(window) it says it's a window object, and everything looks normal.
const JSDOM = require("jsdom");
const Chart = require("chart.js");

// Chart.js needs a canvas to draw on; build one inside a virtual DOM
// since this runs in a Node backend with no browser.
const dom = new JSDOM.JSDOM(`<!DOCTYPE html><canvas id="myChart" width="400" height="400"></canvas>`);
const canvas = dom.window.document.getElementById('myChart');
const ctx = canvas.getContext('2d');

const chart = new Chart(ctx, {
  type: 'line',
  data: {
    labels: ['Standing costs', 'Running costs'],
    datasets: [{
      label: 'Washing and cleaning',
      data: [0, 8],
      backgroundColor: '#22aa99'
    }, {
      label: 'Traffic tickets',
      data: [0, 2],
      backgroundColor: '#994499'
    }, {
      label: 'Tolls',
      data: [0, 1],
      backgroundColor: '#316395'
    }, {
      label: 'Parking',
      data: [5, 2],
      backgroundColor: '#b82e2e'
    }, {
      label: 'Car tax',
      data: [0, 1],
      backgroundColor: '#66aa00'
    }, {
      label: 'Repairs and improvements',
      data: [0, 2],
      backgroundColor: '#dd4477'
    }, {
      label: 'Maintenance',
      data: [6, 1],
      backgroundColor: '#0099c6'
    }, {
      label: 'Inspection',
      data: [0, 2],
      backgroundColor: '#990099'
    }, {
      label: 'Loan interest',
      data: [0, 3],
      backgroundColor: '#109618'
    }, {
      label: 'Depreciation of the vehicle',
      data: [0, 2],
      backgroundColor: '#109618'
    }, {
      label: 'Fuel',
      data: [0, 1],
      backgroundColor: '#dc3912'
    }, {
      label: 'Insurance and Breakdown cover',
      data: [4, 0],
      backgroundColor: '#3366cc'
    }]
  },
  options: {
    // NOTE(review): `onAnimationComplete` is a Chart.js v1 option; on
    // v2+ the equivalent hook is `options.animation.onComplete` --
    // confirm against the installed chart.js version.
    onAnimationComplete: animationDone,
    responsive: false,
    legend: {
      position: 'right'
    },
    scales: {
      xAxes: [{
        stacked: true
      }],
      yAxes: [{
        stacked: true
      }]
    }
  }
});

function animationDone() {
  // BUG FIX: the canvas method is `toDataURL` (capital URL) -- `toDataUrl`
  // is undefined and would throw a TypeError, which is consistent with
  // "it is not working". The correct MIME type is "image/jpeg", not
  // "image/jpg" (invalid types silently fall back to PNG).
  return canvas.toDataURL("image/jpeg");
}
I just want an image file or url that I can send to slack api.
Use chartjs-node-canvas, this is a Node JS renderer for Chart.js using canvas.
It provides an alternative to chartjs-node that does not require jsdom (or the global variables that this requires) and allows chartJS as a peer dependency, so you can manage its version yourself.
This is how it will work with your code:
// chartjs-node-canvas renders Chart.js server-side on a node canvas --
// no jsdom and no browser globals required.
const { CanvasRenderService } = require('chartjs-node-canvas');
const width = 400;
const height = 400;
// Runs once with the ChartJS module, so global config, plugins, and
// custom chart types can be registered before any rendering happens.
const chartCallback = (ChartJS) => {
// Global config example: https://www.chartjs.org/docs/latest/configuration/
ChartJS.defaults.global.elements.rectangle.borderWidth = 2;
// Global plugin example: https://www.chartjs.org/docs/latest/developers/plugins.html
ChartJS.plugins.register({
// plugin implementation
});
// New chart type example: https://www.chartjs.org/docs/latest/developers/charts.html
ChartJS.controllers.MyType = ChartJS.DatasetController.extend({
// chart implementation
});
};
const canvasRenderService = new CanvasRenderService(width, height, chartCallback);
(async () => {
// Plain Chart.js configuration -- same shape as in the browser version.
const configuration = {
type: 'line',
data: {
labels: ['Standing costs', 'Running costs'],
datasets: [{
label: 'Washing and cleaning',
data: [0, 8],
backgroundColor: '#22aa99'
}, {
label: 'Traffic tickets',
data: [0, 2],
backgroundColor: '#994499'
}, {
label: 'Tolls',
data: [0, 1],
backgroundColor: '#316395'
}, {
label: 'Parking',
data: [5, 2],
backgroundColor: '#b82e2e'
}, {
label: 'Car tax',
data: [0, 1],
backgroundColor: '#66aa00'
}, {
label: 'Repairs and improvements',
data: [0, 2],
backgroundColor: '#dd4477'
}, {
label: 'Maintenance',
data: [6, 1],
backgroundColor: '#0099c6'
}, {
label: 'Inspection',
data: [0, 2],
backgroundColor: '#990099'
}, {
label: 'Loan interest',
data: [0, 3],
backgroundColor: '#109618'
}, {
label: 'Depreciation of the vehicle',
data: [0, 2],
backgroundColor: '#109618'
}, {
label: 'Fuel',
data: [0, 1],
backgroundColor: '#dc3912'
}, {
label: 'Insurance and Breakdown cover',
data: [4, 0],
backgroundColor: '#3366cc'
}]
},
options: {
responsive: false,
legend: {
position: 'right'
},
scales: {
xAxes: [{
stacked: true
}],
yAxes: [{
stacked: true
}]
}
}
};
// Renders the chart off-screen and resolves to a data-URL string.
const dataUrl = await canvasRenderService.renderToDataURL(configuration);
})();
The dataUrl variable will contain the image you can pass to the Slack API.

Nodejs kafka consumer infinite loop

I am running kafka_2.11-2.0.0 on ubuntu 16.04 machine. Created a topic and produced some messages to it from command line interface.
And started consumer from command line, it's consuming well.
But when I started the Node.js consumer like below, it iterates infinitely. Is there anything I am missing in my client code?
// kafka-node consumer: subscribes to partition 0 of 'mytopic' with
// auto-commit enabled.
var kafka = require('kafka-node'),
Consumer = kafka.Consumer,
client = new kafka.Client(),
consumer = new Consumer(
client,
[
{topic: 'mytopic', partition: 0}
],
{
autoCommit: true
}
);
// Log each message as it arrives. NOTE(review): the repeating output in
// the question suggests offsets are not actually being committed, so the
// consumer keeps re-fetching from offset 0 -- suspect a broker/client
// version incompatibility rather than this handler.
consumer.on('message', function (message) {
console.log(message);
});
consumer.on('error', function (err){
console.log(err);
})
// Exit when the requested offset is outside the broker's retained range.
consumer.on('offsetOutOfRange', function (err){
console.log(err);
process.exit();
})
Here is the output.
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
{ topic: 'mytopic',
value: 'message2',
offset: 1,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: 'message3',
offset: 2,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
{ topic: 'mytopic',
value: 'message2',
offset: 1,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: 'message3',
offset: 2,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
{ topic: 'mytopic',
value: 'message2',
offset: 1,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: 'message3',
offset: 2,
partition: 0,
highWaterOffset: 3,
key: null }
{ topic: 'mytopic',
value: '',
offset: 0,
partition: 0,
highWaterOffset: 3,
key: '' }
Finally found that the issue is with the new Kafka release 2.0.0. So I moved to the previous version and it's working now.

How to parse jolokia response into Elasticsearch using logstash

I'm trying to create a logstash pipeline that polls a ActiveMQ jolokia endpoint. I'm wanting to collect all the metrics for the queues on the broker. I have the following pipeline.
# Poll ActiveMQ's Jolokia REST endpoint every 5 seconds for per-queue
# broker metrics (all destinations of type Queue on broker "localhost").
input {
http_poller {
urls => {
health_metrics => {
method => "get"
url => "http://localhost:8161/api/jolokia/read/org.apache.activemq:type=Broker,brokerName=localhost,destinationType=Queue,destinationName=*"
headers => {
"Content-Type" => "application/json"
}
auth => {
user => "admin"
password => "admin"
}
}
}
request_timeout => 30
keepalive => false
interval => 5
codec => "json"
type => "activemq_broker_queue"
}
}
# Re-encode then re-parse the nested "value" object so its fields become
# top-level event fields; drop the wrapper fields afterwards.
filter {
json_encode {
source => "value"
}
json {
source => "value"
}
mutate {
remove_field => ["request", "value", "timestamp"]
}
}
# Ship each event to Elasticsearch and mirror it to stdout for debugging.
output {
elasticsearch {
hosts => "localhost"
# An index is created for each type of metrics input
index => "logstash-activmq"
document_type => "%{type}"
}
stdout {
codec => rubydebug
}
}
My jolokia response is in this format.
{
request: {
mbean: "org.apache.activemq:brokerName=localhost,destinationName=*,destinationType=Queue,type=Broker",
type: "read"
},
value: {
org.apache.activemq: brokerName=localhost,
destinationName=SEARCH,
destinationType=Queue,
type=Broker: {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "SEARCH",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
},
org.apache.activemq: brokerName=localhost,
destinationName=weather,
destinationType=Queue,
type=Broker: {
ProducerFlowControl: true,
Options: "",
AlwaysRetroactive: false,
MemoryUsageByteCount: 0,
AverageBlockedTime: 0,
MemoryPercentUsage: 0,
CursorMemoryUsage: 0,
InFlightCount: 0,
Subscriptions: [],
CacheEnabled: true,
ForwardCount: 0,
DLQ: false,
StoreMessageSize: 0,
AverageEnqueueTime: 0,
Name: "weather",
BlockedSends: 0,
TotalBlockedTime: 0,
MaxAuditDepth: 2048,
QueueSize: 0,
MaxPageSize: 200,
PrioritizedMessages: false,
MemoryUsagePortion: 1,
Paused: false,
EnqueueCount: 0,
MessageGroups: {
},
ConsumerCount: 0,
AverageMessageSize: 0,
CursorFull: false,
ExpiredCount: 0,
MaxProducersToAudit: 1024,
CursorPercentUsage: 0,
MinEnqueueTime: 0,
MemoryLimit: 668309914,
MinMessageSize: 0,
DispatchCount: 0,
MaxEnqueueTime: 0,
DequeueCount: 0,
BlockedProducerWarningInterval: 30000,
ProducerCount: 0,
MessageGroupType: "cached",
MaxMessageSize: 0,
UseCache: true,
SlowConsumerStrategy: null
}
},
timestamp: 1453588727,
status: 200
}
I would like to be able to split the two queue destinations into two different documents and then save them to ES.
Currently I'm getting an error saying field names cannot contain '.'.

Resources