Logstash Grok find source for _grokparsefailure - logstash

This is how I try to find the source of _grokparsefailure:
echo '<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id="0003"
severity="info" sys="SecureWeb" sub="http" name="http access"
action="pass" method="CONNECT" srcip="10.134.240.227" dstip="" user=""
group="" ad_domain="" statuscode="407" cached="0"
profile="REF_DefaultHTTPProfile (Default Web Filter Profile)"
filteraction=" ()" size="2505" request="0x20cc5800"
url="https://vcsa.vmware.com/" referer="" error="" authtime="1"
dnstime="0" aptptime="0" cattime="0" avscantime="0" fullreqtime="201"
device="3" auth="1" ua="Apache-HttpClient/4.5.13 (Java/1.8.0_321)"
exceptions="url,ssl,certcheck,certdate"' |
/usr/share/logstash/bin/logstash -f
/etc/logstash/conf.d/10-utm.stdin.test --debug
10-utm.stdin.test:
# These are grok patterns for Sophos SG UTM logs
input { stdin { } }
output { stdout { codec => rubydebug } }
filter {
grok {
add_tag => [ "Line7" ]
}
if "sophos-utm" in [tags] {
grok {
add_tag => [ "Line11" ]
break_on_match => true
match => ["message",'<%{INT:utm_syslog_pri}>(?:%{YEAR}):(%{MONTHNUM}):(?:%{MONTHDAY})-(?:%{HOUR}):(?:%{MINUTE}):(?:%{SECOND}) (?:%{SYSLOGHOST}) (?:%{SYSLOGPROG}): id=\"%{INT:utm_id}\" .* sub=\"%{DATA:utm_sub}\"']
match => ["message",'<%{INT:utm_syslog_pri}>(?:%{YEAR}):(%{MONTHNUM}):(?:%{MONTHDAY})-(?:%{HOUR}):(?:%{MINUTE}):(?:%{SECOND}) (?:%{SYSLOGHOST}) (?:%{SYSLOGPROG}): id=\"%{INT:utm_id}\"']
match => ["message",'<%{INT:utm_syslog_pri}>(?:%{YEAR}):(%{MONTHNUM}):(?:%{MONTHDAY})-(?:%{HOUR}):(?:%{MINUTE}):(?:%{SECOND}) (?:%{SYSLOGHOST}) (?:%{SYSLOGPROG}): \[%{DATA:utm_security2}:.*\]']
match => ["message",'<%{INT:utm_syslog_pri}>(?:%{YEAR}):(%{MONTHNUM}):(?:%{MONTHDAY})-(?:%{HOUR}):(?:%{MINUTE}):(?:%{SECOND}) (?:%{SYSLOGHOST}) (?:%{SYSLOGPROG}):']
overwrite => ["MONTHNUM", "MONTHDAY", "HOUR", "MINUTE", "SECOND", "SYSLOGHOST", "SYSLOGPROG", "id"]
#tag_on_failure => []
}
grok {
add_tag => [ "Line22" ]
}
if "_grokparsefailure" in [tags] {
grok {
add_tag => [ "Line26" ]
}
}
if [process][name] == "httpd" {
if [utm_security2] {
grok {
match => ["message",'.*\[client %{IP:utm_srcip}\].* \[msg \"%{DATA:utm_msg}\"\].* \[data \"%{DATA:utm_data}\"\].* \[severity \"%{LOGLEVEL:utm_severity}\"\].* \[tag \"OWASP_TOP_10/%{DATA:utm_owasptop10}\"\].* \[hostname \"%{DATA:utm_hostname}\"\].* \[uri \"%{DATA:utm_uri}\"\]']
}
if [utm_owasptop10] == "A1" {
mutate {
replace => ["utm_owasptop10","Injection (SQL,OS,XXE,LDAP)"]
}
}
if [utm_owasptop10] == "A2" {
mutate {
replace => ["utm_owasptop10","Broken Authentification and Session Management"]
}
}
if [utm_owasptop10] == "A3" {
mutate {
replace => ["utm_owasptop10","Cross-Site Scripting"]
}
}
if [utm_owasptop10] == "A4" {
mutate {
replace => ["utm_owasptop10","Broken Access Control"]
}
}
if [utm_owasptop10] == "A5" {
mutate {
replace => ["utm_owasptop10","Security Misconfiguration"]
}
}
if [utm_owasptop10] == "A6" {
mutate {
replace => ["utm_owasptop10","Sensitive Data Exposure"]
}
}
if [utm_owasptop10] == "A7" {
mutate {
replace => ["utm_owasptop10","Insufficient Attack Protection"]
}
}
if [utm_owasptop10] == "A8" {
mutate {
replace => ["utm_owasptop10","Cross-Site Request Forgery (CSRF)"]
}
}
if [utm_owasptop10] == "A9" {
mutate {
replace => ["utm_owasptop10","Using Component with Know Vulnerabilities"]
}
}
if [utm_owasptop10] == "A10" {
mutate {
replace => ["utm_owasptop10","Underprotected APIs (SOAP,REST,RPC,GWT)"]
}
}
} else {
grok {
match => ["message",'.* srcip=\"%{IP:utm_srcip}\" localip=\"%{IP:utm_localip}\" size=\"%{INT:utm_size}\" user=\"%{DATA:utm_user}\" host=\"%{IP:utm_host}\" method=\"%{DATA:utm_method}\" statuscode=\"%{INT:utm_statuscode}\" reason=\"%{DATA:utm_reason}\" extra=\"%{DATA:utm_extra}\" exceptions=\"%{DATA:utm_exceptions}\" time=\"%{INT:utm_time}\" url=\"%{DATA:utm_url}\" server=\"%{DATA:utm_server}\" port=\"%{DATA:utm_port}\" query=\"%{DATA:utm_query}\" referer=\"%{DATA:utm_referer}\"']
}
}
# Find the GeoLite Database here : https://dev.maxmind.com/geoip/geoip2/geolite2/
geoip {
source => "utm_srcip"
target => "geoip"
database => "/etc/logstash/conf.d/data/GeoLite2-City.mmdb"
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
}
if [process][name] == "httpproxy" {
grok {
# match => ["message",'.* (?:severity=\"%{LOGLEVEL:utm_severity}\") (?:sys=\"%{DATA:utm_sys}\") (?:sub=\"%{DATA}\") (?:name=\"%{DATA:utm_name}\") (?:action=\"%{DATA:utm_action}\") ?(?:method=\"%{DATA:utm_method}?\")? (?:srcip=\"%{IP:utm_srcip}?\") (?:dstip=\"%{IP:utm_dstip}?\") (?:user=\"%{DATA:utm_user}?\") (?:group=\"%{DATA:utm_group}?\") (?:ad_domain=\"%{DATA:utm_ad_domain}?\") (?:statuscode=\"%{INT:utm_satuscode}?\") (?:cached=\"%{INT:utm_cached}?\") (?:profile=\"%{DATA:utm_profil}?\") (?:filteraction=\"%{DATA:utm_filteraction}?\") (?:size=\"%{INT:utm_size}?\") (?:request=\"%{DATA:utm_request}?\") (?:url=\"%{DATA:utm_url}?\") ?(?:referer=\"%{DATA:utm_referer}?\") ?(?:error=\"%{DATA:utm_error}?\") ?(?:authtime=\"%{DATA:utm_authtime}?\") ?(?:dnstime=\"%{INT:utm_dnstime}?\") ?(?:aptptime=\"%{INT:utm_aptptime}?\") ?(?:cattime=\"%{INT:utm_cattime}?\") ?(?:avscantime=\"%{INT:utm_avscantime}?\")? ?(?:fullreqtime=\"%{INT:utm_fullreqtime}?\")? ?(?:device=\"%{INT:utm_device}?\")? ?(?:auth=\"%{INT:utm_auth}?\")? ?(?:ua=\"%{DATA:utm_ua}?\")? ?(?:exceptions=\"%{DATA:utm_exceptions}?\")? ?(?:application=\"%{DATA:utm_application}?\")? ?(?:app-id=\"%{INT:utm_app-id}?\")? ?(?:category=\"%{DATA:utm_category}?\")? ?(?:reputation=\"%{DATA:utm_reputation}?\")? ?(?:categoryname=\"%{DATA:utm_categoryname}?\")? ?(?:sandbox=\"%{DATA:utm_sandbox}?\")?']
match => ["message",'(?:severity=\"%{LOGLEVEL:utm_severity}\") (?:sys=\"%{DATA:utm_sys}\") (?:sub=\"%{DATA:utm_sub}\") (?:name=\"%{DATA:utm_name}\") (?:action=\"%{DATA:utm_action}\") ?(?:method=\"%{DATA:utm_method}?\")? (?:srcip=\"%{IP:utm_srcip}?\") (?:dstip=\"%{IP:utm_dstip}?\") (?:user=\"%{DATA:utm_user}?\") (?:group=\"%{DATA:utm_group}?\") (?:ad_domain=\"%{DATA:utm_ad_domain}?\") (?:statuscode=\"%{INT:utm_satuscode}?\") (?:cached=\"%{INT:utm_cached}?\") (?:profile=\"%{DATA:utm_profil}?\") (?:filteraction=\"%{DATA:utm_filteraction}?\") (?:size=\"%{INT:utm_size}?\") (?:request=\"%{DATA:utm_request}?\") (?:url=\"%{URI:utm_url}?\") ?(?:referer=\"%{URI:utm_referer}?\") ?(?:error=\"%{DATA:utm_error}?\") ?(?:authtime=\"%{DATA:utm_authtime}?\") ?(?:dnstime=\"%{INT:utm_dnstime}?\") ?(?:aptptime=\"%{INT:utm_aptptime}?\") ?(?:cattime=\"%{INT:utm_cattime}?\") ?(?:avscantime=\"%{INT:utm_avscantime}?\")? ?(?:fullreqtime=\"%{INT:utm_fullreqtime}?\")? ?(?:device=\"%{INT:utm_device}?\")? ?(?:auth=\"%{INT:utm_auth}?\")? ?(?:ua=\"%{DATA:utm_ua}?\")? ?(?:exceptions=\"%{DATA:utm_exceptions}?\")? ?(?:application=\"%{DATA:utm_application}?\")? ?(?:app-id=\"%{INT:utm_app-id}?\")? ?(?:category=\"%{DATA:utm_category}?\")? ?(?:reputation=\"%{DATA:utm_reputation}?\")? ?(?:categoryname=\"%{DATA:utm_categoryname}?\")? ?(?:sandbox=\"%{DATA:utm_sandbox}?\")? ?(?:country=\"%{DATA:utm_country}?\")? ?(?:content-type=\"%{DATA:utm_content_type}?\")?']
match => ['utm_url','\.(?<utm_domain>[^.]+\.[^.]+)$']
add_tag => [ "Line108" ]
}
if [utm_categoryname] == "Search Engines" {
grok {
match => ["utm_url", '.*q=(?<utm_search>[^$#&]+)(|[$#&].*)']
}
urldecode {
field => "utm_search"
}
mutate {
gsub => ["utm_search","\+"," "]
}
}
if "_grokparsefailure" in [tags] {
grok {
add_tag => [ "Line123" ]
}
}
#dns {
# reverse => ["utm_srcip"]
# action => "replace"
# }
}
if "_grokparsefailure" in [tags] {
grok {
add_tag => [ "Line134" ]
}
}
if [process][name] == "snort" {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA:utm_sub}\" name=\"%{DATA:utm_name}\" action=\"%{DATA:utm_action}\" reason=\"%{DATA:utm_reason}\" srcip=\"%{IP:utm_srcip}\" dstip=\"%{DATA:utm_dstip}\" srcport=\"%{INT:utm_srcport}\" dstport=\"%{INT:utm_dstport}\" sid=\"%{DATA:utm_sid_snort}\" class=\"%{DATA:utm_class}\"']
}
geoip {
source => "utm_srcip"
target => "geoip"
database => "/etc/logstash/conf.d/data/GeoLite2-City.mmdb"
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
}
if [process][name] == "ulogd" {
if [utm_sub] == "ips" {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA:utm_sub}\" name=\"%{DATA:utm_name}\" (action=\"%{DATA:utm_action}\") reason=\"%{DATA:utm_reason}\" group=\"%{DATA:utm_group}\" srcip=\"%{IP:utm_srcip}\" dstip=\"%{DATA:utm_dstip}\" proto=\"%{INT:utm_proto}\" srcport=\"%{INT:utm_srcport}\" dstport=\"%{INT:utm_dstport}\" sid=\"%{DATA:utm_sid_snort}\" class=\"%{DATA:utm_class}\"']
match => ["message",'.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA:utm_sub}\" name=\"%{DATA:utm_name}\" action=\"%{DATA:utm_action}\" fwrule=\"%{INT:utm_fwrule}\" initf=\"%{DATA:utm_initf}\" srcmac=\"%{MAC:utm_srcmac}\" dstmac=\"%{MAC:utm_dstmac}\" srcip=\"%{IP:utm_srcip}\" dstip=\"%{IP:utm_dstip}\".* (srcport=\"%{INT:utm_srcport}\")?.* (dstport=\"%{INT:utm_dstport}\")?']
}
}
if [utm_sub] == "packetfilter" {
grok {
match => ["message", '.* sys=\"%{DATA:utm_sys}\" sub=\"%{DATA}\" name=\"%{DATA:utm_name}\" action=\"%{DATA:utm_action}\" fwrule=\"%{INT:utm_fwrule}\" ?(initf=\"%{DATA:utm_initf}\")? ?(outitf=\"%{DATA:utm_outif}\")? ?(srcmac=\"%{MAC:utm_srcmac}\")? ?(dstmac=\"%{MAC:utm_dstmac}\")? srcip=\"%{IP:utm_srcip}\" dstip=\"%{IP:utm_dstip}\" proto=\"%{INT:utm_protocol}\" length=\"%{INT:utm_ulogd_pkglength}\" tos=\"%{DATA:utm_ulogd_tos}\" prec=\"%{DATA:utm_ulogd_prec}\" ttl=\"%{INT:utm_ttl}\" srcport=\"%{INT:utm_srcport}\" dstport=\"%{INT:utm_dstport}\" ?(tcpflags=\"%{DATA:utm_tcpflags}\")? ?(info=\"%{DATA:utm_info}\")?']
}
}
}
if [process][name] == "awelogger" {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA}\" name=\"%{DATA:utm_name}\" ssid=\"%{DATA:utm_ssid}\".* bssid=\"%{MAC:utm_bssid}\"']
}
}
if [process][name] == "awed" {
grok {
match => ["message", '.* \[{%DATA:utm_ap}\] .* from %{IP:utm_srcip}:%{INT:utm_port}']
}
}
#if [process][name] == "hostapd" {
# grok {
# match => ["message", '.*: {%DATA:utm_intf}: .* from %{IP:utm_srcip}:%{INT:utm_port}']
# }
#}
if [process][name] in ["openvpn", "pppd-l2tp"] {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA}\" event=\"%{DATA:utm_event}\" username=\"%{DATA:utm_username}\" variant=\"%{DATA:utm_variant}\" srcip=\"%{IP:utm_srcip}\".* virtual_ip=\"%{IP:utm_virtual_ip}\"']
}
geoip {
source => "utm_srcip"
target => "geoip"
database => "/etc/logstash/conf.d/data/GeoLite2-City.mmdb"
add_field => [ "[geoip][coordinates]", "%{[geoip][longitude]}" ]
add_field => [ "[geoip][coordinates]", "%{[geoip][latitude]}" ]
}
mutate {
convert => [ "[geoip][coordinates]", "float"]
}
}
if [process][name] == "pluto" {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA}\" event=\"%{DATA:utm_event}\" variant=\"%{DATA:utm_variant}\" connection=\"%{DATA:utm_connection}\" address=\"%{IP:utm_address}\" local_net=\"%{DATA:utm_local_net}\" remote_net=\"%{DATA:utm_remote_net}\"']
}
}
if [process][name] == "afcd" {
grok {
match => ["message", '.* severity=\"%{LOGLEVEL:utm_severity}\" sys=\"%{DATA:utm_sys}\" sub=\"%{DATA}\" name=\"%{DATA:utm_name}\" srcip=\"%{IP:utm_srcip}\" dstip=\"%{IP:utm_dstip}\" .* threatname=\"%{DATA:utm_threatname}\" .* host=\"%{DATA:utm_host}\" .* action=\"%{DATA:utm_action}\"']
}
}
mutate {
replace => ["type","sophosutm"]
add_field => ["utm_size_number","%{utm_size}"]
}
if "_grokparsefailure" in [tags] {
grok {
add_tag => [ "Line222" ]
}
}
mutate {
convert => {"utm_size_number" => "integer"}
}
if "_grokparsefailure" in [tags] {
grok {
add_tag => [ "Line230" ]
}
}
}
}
I think this is where the tag gets added, but I have no clue why:
[DEBUG] 2022-10-24 14:01:31.531 [[main]>worker1] grok - Running grok filter {:event=>{"#version"=>"1", "#timestamp"=>2022-10-24T14:01:31.415658841Z, "message"=>"<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\"", "host"=>{"hostname"=>"elk-1-test"}, "event"=>{"original"=>"<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\""}}}
[DEBUG] 2022-10-24 14:01:31.532 [[main]>worker1] grok - Event now: {:event=>{"#version"=>"1", "message"=>"<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\"", "#timestamp"=>2022-10-24T14:01:31.415658841Z, "host"=>{"hostname"=>"elk-1-test"}, "tags"=>["_grokparsefailure"], "event"=>{"original"=>"<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\""}}}
{
"#version" => "1",
"message" => "<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\"",
"#timestamp" => 2022-10-24T14:01:31.415658841Z,
"host" => {
"hostname" => "elk-1-test"
},
"tags" => [
[0] "_grokparsefailure"
],
"event" => {
"original" => "<30>2022:10:24-15:08:28 utm-1 httpproxy[28052]: id=\"0003\" severity=\"info\" sys=\"SecureWeb\" sub=\"http\" name=\"http access\" action=\"pass\" method=\"CONNECT\" srcip=\"10.134.240.227\" dstip=\"\" user=\"\" group=\"\" ad_domain=\"\" statuscode=\"407\" cached=\"0\" profile=\"REF_DefaultHTTPProfile (Default Web Filter Profile)\" filteraction=\" ()\" size=\"2505\" request=\"0x20cc5800\" url=\"https://vcsa.vmware.com/\" referer=\"\" error=\"\" authtime=\"1\" dnstime=\"0\" aptptime=\"0\" cattime=\"0\" avscantime=\"0\" fullreqtime=\"201\" device=\"3\" auth=\"1\" ua=\"Apache-HttpClient/4.5.13 (Java/1.8.0_321)\" exceptions=\"url,ssl,certcheck,certdate\""
}
}
[DEBUG] 2022-10-24 14:01:31.671 [[main]-pipeline-manager] javapipeline - Shutdown waiting for worker thread {:pipeline_id=>"main", :thread=>"#<LogStash::WorkerLoopThread:0x57397be3 dead>"}
[DEBUG] 2022-10-24 14:01:31.671 [[main]-pipeline-manager] javapipeline - Shutdown waiting for worker thread {:pipeline_id=>"main", :thread=>"#<LogStash::WorkerLoopThread:0x56f4bfea dead>"}
[DEBUG] 2022-10-24 14:01:31.672 [[main]-pipeline-manager] grok - Closing {:plugin=>"LogStash::Filters::Grok"}
[DEBUG] 2022-10-24 14:01:31.673 [[main]-pipeline-manager] pluginmetadata - Removing metadata for plugin e489c8cb24e095cea22f0d0ea0836e8556029d1f12126d19d9dfbf7ecd8c43d1
[DEBUG] 2022-10-24 14:01:31.673 [[main]-pipeline-manager] grok - Closing {:plugin=>"LogStash::Filters::Grok"}
EDIT:
Thank you @Badger.
This is what works for me to add a simple tag for debugging:
if "_grokparsefailure" in [tags] {
grok {
match => ["message",'%{GREEDYDATA}']
add_tag => [ "Line134-Fail" ]
remove_tag => ["_grokparsefailure"]
}
}

Your event does not have any tags, so your entire configuration is equivalent to
grok { add_tag => [ "Line7" ] }
which does add a "_grokparsefailure" tag. That happens because the match function defaults to failure, and if it is given an empty hash of matches to check, it returns false.
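If all you need at that point is a breadcrumb tag for debugging, a mutate filter avoids the problem entirely, since it has no match step that can fail (a minimal sketch; the tag name is just an example):
# mutate never adds _grokparsefailure, so it can tag the event without side effects
mutate {
add_tag => [ "Line7" ]
}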

Related

Add log4net Level field to logstash.conf file

I'm trying to add a LEVEL field (so it shows up in Kibana).
Input:
2018-03-18 15:43:40.7914 - INFO: Tick
2018-03-18 15:43:40.7914 - ERROR: Tock
My logstash.conf file:
input {
beats {
port => 5044
}
}
filter {
grok {
match => {
"message" => "(?m)^%{TIMESTAMP_ISO8601:timestamp}~~\[%{DATA:thread}\]~~\[%{DATA:user}\]~~\[%{DATA:requestId}\]~~\[%{DATA:userHost}\]~~\[%{DATA:requestUrl}\]~~%{DATA:level}~~%{DATA:logger}~~%{DATA:logmessage}~~%{DATA:exception}\|\|"
}
match => {
"levell" => "(?m)^%{DATA:level}"
}
add_field => {
"received_at" => "%{#timestamp}"
"received_from" => "%{host}"
"level" => "levell"
}
remove_field => ["message"]
}
date {
match => [ "timestamp", "yyyy-MM-dd HH:mm:ss:SSS" ]
}
}
output {
elasticsearch {
hosts => ["http://localhost:9200"]
sniffing => true
index => "filebeat-%{+YYYY.MM.dd}"
document_type => "%{[#metadata][type]}"
#user => "elastic"
#password => "changeme"
}
stdout { codec => rubydebug }
}
This prints out "levell" instead of "INFO"/"ERROR", etc.
EDIT:
Input:
2018-03-18 15:43:40.7914 - INFO: Tick
configuration:
# Sample Logstash configuration for creating a simple
# Beats -> Logstash -> Elasticsearch pipeline.
input {
beats {
port => 5044
}
}
filter {
grok {
match => { "message" => "(?m)^%{TIMESTAMP_ISO8601:timestamp}~~\[%{DATA:thread}\]~~\[%{DATA:user}\]~~\[%{DATA:requestId}\]~~\[%{DATA:userHost}\]~~\[%{DATA:requestUrl}\]~~%{DATA:level}~~%{DATA:logger}~~%{DATA:logmessage}~~%{DATA:exception}\|\|" }
add_field => {
"received_at" => "%{#timestamp}"
"received_from" => "%{host}"
}
}
grok {
match => { "message" => "- %{LOGLEVEL:level}" }
remove_field => ["message"]
}
date {
match => [ "timestamp", "yyyy-MM-dd HH:mm:ss:SSS" ]
}
}
output {
elasticsearch {
hosts => ["http://localhost:9200"]
sniffing => true
index => "filebeat-%{+YYYY.MM.dd}"
document_type => "%{[#metadata][type]}"
#user => "elastic"
#password => "changeme"
}
stdout { codec => rubydebug }
}
Output I'm getting. Still missing received_at and level:
In that part of the configuration:
add_field => {
"received_at" => "%{#timestamp}"
"received_from" => "%{host}"
"level" => "levell"
}
When using "level" => "levell", you just put the String levell in the field level. To put the value of the field named levell, you have to use %{levell}. So in you case, it would look like:
add_field => {
"received_at" => "%{#timestamp}"
"received_from" => "%{host}"
"level" => "%{levell}"
}
Also, according to the documentation, grok's match option is:
A hash that defines the mapping of where to look, and with which patterns.
So trying to match on the levell field won't work, since it looks like that field doesn't exist yet. And the grok pattern you're using to match the message field doesn't match the example you provided.
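For the sample lines shown above ("2018-03-18 15:43:40.7914 - INFO: Tick"), a single grok along these lines should capture both the timestamp and the level in one pass (a sketch assuming that line format; the field names are only examples):
grok {
# matches e.g. "2018-03-18 15:43:40.7914 - INFO: Tick"
match => { "message" => "^%{TIMESTAMP_ISO8601:timestamp} - %{LOGLEVEL:level}: %{GREEDYDATA:logmessage}" }
}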

Grok help for a custom metric

I have a log line like this:
09 Nov 2018 15:51:35 DEBUG api.MapAnythingProvider - Calling API For Client: XXX Number of ELEMENTS Requested YYY
I want to ignore all other log lines and keep only those lines that have the words "Calling API For Client" in them. Further, I am only interested in the string XXX and the number YYY.
Thanks for the help.
input {
file {
path => ["C:/apache-tomcat-9.0.7/logs/service/service.log"]
sincedb_path => "nul"
start_position => "beginning"
}
}
filter {
grok {
match => {
"message" => "%{MONTHDAY:monthDay} %{MONTH:mon} %{YEAR:year} %{TIME:ts} %{WORD:severity} %{JAVACLASS:claz} - %{GREEDYDATA:logmessage}"
}
}
grok {
match => {
"logmessage" => "%{WORD:keyword} %{WORD:customer} %{WORD:key2} %{NUMBER:mapAnythingCreditsConsumed:float} %{WORD:key3} %{NUMBER:elementsFromCache:int}"
}
}
if "_grokparsefailure" in [tags] {
drop {}
}
mutate {
remove_field => [ "monthDay", "mon", "ts", "severity", "claz", "keyword", "key2", "path", "message", "year", "key3" ]
}
}
output {
if [logmessage] =~ /ExecutingJobFor/ {
elasticsearch {
hosts => ["localhost:9200"]
index => "test"
manage_template => false
}
stdout {
codec => rubydebug
}
}
}

Logstash stopping randomly after few hours

I'm getting a random error when I run logstash:
16:30:26.240 [[main]>worker0] ERROR logstash.pipeline - Exception in
pipelineworker, the pipeline stopped processing new events, please
check your filter configuration and restart Logstash.
{"exception"=>#,
"backtrace"=>["org/jruby/RubyString.java:3101:in gsub'",
"org/jruby/RubyString.java:3069:ingsub'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:317:in
gsub_dynamic_fields'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:308:in
gsub'", "org/jruby/RubyArray.java:1613:in each'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:290:in
gsub'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:207:in
filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:145:in
do_filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:164:in
multi_filter'", "org/jruby/RubyArray.java:1613:ineach'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:161:in
multi_filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filter_delegator.rb:41:in
multi_filter'", "(eval):4135:in initialize'",
"org/jruby/RubyArray.java:1613:ineach'", "(eval):4131:in
initialize'", "org/jruby/RubyProc.java:281:incall'", "(eval):997:in
filter_func'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:295:in
filter_batch'", "org/jruby/RubyProc.java:281:in call'",
"/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:192:in
each'", "org/jruby/RubyHash.java:1342:in each'",
"/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:191:in
each'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:294:in
filter_batch'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:282:in
worker_loop'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:258:in
start_workers'"]} 16:30:26.542 [LogStash::Runner] FATAL
logstash.runner - An unexpected error occurred!
{:error=>#<InterruptedRegexpError: Regexp Interrupted>,
:backtrace=>["org/jruby/RubyString.java:3101:ingsub'",
"org/jruby/RubyString.java:3069:in gsub'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:317:in
gsub_dynamic_fields'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:308:in
gsub'", "org/jruby/RubyArray.java:1613:ineach'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:290:in
gsub'",
"/usr/share/logstash/vendor/bundle/jruby/1.9/gems/logstash-filter-mutate-3.1.3/lib/logstash/filters/mutate.rb:207:in
filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:145:in
do_filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:164:in
multi_filter'", "org/jruby/RubyArray.java:1613:in each'",
"/usr/share/logstash/logstash-core/lib/logstash/filters/base.rb:161:in
multi_filter'",
"/usr/share/logstash/logstash-core/lib/logstash/filter_delegator.rb:41:in
multi_filter'", "(eval):4135:ininitialize'",
"org/jruby/RubyArray.java:1613:in each'", "(eval):4131:in
initialize'", "org/jruby/RubyProc.java:281:in call'", "(eval):997:in
filter_func'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:295:in
filter_batch'", "org/jruby/RubyProc.java:281:incall'",
"/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:192:in
each'", "org/jruby/RubyHash.java:1342:ineach'",
"/usr/share/logstash/logstash-core/lib/logstash/util/wrapped_synchronous_queue.rb:191:in
each'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:294:in
filter_batch'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:282:in
worker_loop'",
"/usr/share/logstash/logstash-core/lib/logstash/pipeline.rb:258:in
start_workers'"]}
My logstash config file is:
input {
file {
type => "SystemError"
path => "/app/systemerr/**/*"
start_position => "beginning"
codec => multiline {
pattern => "^\s"
what => "previous"
}
}
file {
type => "SystemOut"
path => "/app/systemout/**/*"
start_position => "beginning"
codec => multiline {
pattern => "^\["
negate => true
what => "previous"
}
}
file {
type => "Errorlog"
path => "/app/error/**/*"
start_position => "beginning"
codec => multiline {
pattern => "^FATAL"
negate => true
what => "previous"
}
}
file {
type => "Messagelog"
path => "/app/message/**/*"
start_position => "beginning"
codec => multiline {
pattern => "^ERROR"
negate => true
what => "previous"
}
}
}
filter {
if [type] == "SystemError" {
grok {
match => { "message" => "\[%{DATA:timestamp}] %{BASE16NUM:threadID} (?<shortname>\b[A-Za-z0-9\$]{2,}\b)%{SPACE}%{WORD:loglevel}%{SPACE} %{GREEDYDATA:message}" }
overwrite => [ "message" ]
}
mutate {
gsub => ["timestamp", " GMT\+05\:30", ""]
}
date {
match => ["timestamp", "M/dd/yy HH:mm:ss:SSS"]
}
if ([message] =~ "^\tat") {
drop {}
}
if ([path] =~ "113") {
mutate {
add_field => { "server" => "113" }
}
} else {
mutate {
add_field => { "server" => "117" }
}
}
}
if [type] == "SystemOut" {
grok {
match => { "message" => "\[%{DATA:timestamp}] %{BASE16NUM:threadID} (?<shortname>\b[A-Za-z0-9\$]{2,}\b)%{SPACE}%{WORD:loglevel}%{SPACE} %{GREEDYDATA:message}" }
overwrite => [ "message" ]
}
mutate {
gsub => ["timestamp", " GMT\+05\:30", ""]
}
date {
match => ["timestamp", "M/dd/yy HH:mm:ss:SSS"]
}
if ([path] =~ "113") {
mutate {
add_field => { "server" => "113" }
}
} else {
mutate {
add_field => { "server" => "117" }
}
}
}
if [type] == "Errorlog" {
grok {
match => { "message" => "%{LOGLEVEL:loglevel} \| %{TIMESTAMP_ISO8601:timestamp} \| %{DATA:string} \: %{DATA:WebContainer} \| %{DATA:code} \| %{DATA:country} \| %{DATA:user} \| %{GREEDYDATA:message}" }
overwrite => [ "message" ]
}
date {
match => ["timestamp", "yyyy-M-dd HH:mm:ss,SSS"]
}
mutate { remove_field => [ "string" ] }
if ([path] =~ "113") {
mutate {
add_field => { "server" => "113" }
}
} else {
mutate {
add_field => { "server" => "117" }
}
}
}
if [type] == "Messagelog" {
grok {
match => { "message" => "%{LOGLEVEL:loglevel} \| %{TIMESTAMP_ISO8601:timestamp} \| %{DATA:string} \: %{DATA:WebContainer} \| %{DATA:code} \| %{DATA:country} \| %{DATA:user} \| %{GREEDYDATA:message}" }
overwrite => [ "message" ]
}
date {
match => ["timestamp", "yyyy-M-dd HH:mm:ss,SSS"]
}
mutate {
remove_field => [ "string" ]
}
if ([path] =~ "113") {
mutate {
add_field => { "server" => "113" }
}
} else {
mutate {
add_field => { "server" => "117" }
}
}
}
}
Is there anything wrong in the config file? Please help.
You are likely getting a _grokparsefailure, so the timestamp field isn't set. You can surround the mutate/date with an if block like this:
if "_grokparsefailure" not in [tags] {
mutate {
gsub => ["timestamp", " GMT\+05\:30", ""]
}
date {
match => ["timestamp", "M/dd/yy HH:mm:ss:SSS"]
}
}
You may also want to add an else { drop {} }, but you should probably figure out what isn't matching first.
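Put together, it would look roughly like this (a sketch; leave the drop out until you know which lines are failing and why):
if "_grokparsefailure" not in [tags] {
mutate {
gsub => ["timestamp", " GMT\+05\:30", ""]
}
date {
match => ["timestamp", "M/dd/yy HH:mm:ss:SSS"]
}
} else {
# discard events grok could not parse -- only enable this once the failures are understood
drop {}
}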

bro-ids logstash filter not working

I've set up an ELK stack on CentOS 7 and am forwarding logs from a FreeBSD 11 host which runs Bro. However, my filters are not correctly parsing the Bro logs.
This is the current set up:
freebsd filebeat client
filebeat.yml
filebeat:
registry_file: /var/run/.filebeat
prospectors:
-
paths:
- /var/log/messages
- /var/log/maillog
- /var/log/auth.log
- /var/log/cron
- /var/log/debug.log
- /var/log/devd.log
- /var/log/ppp.log
- /var/log/netatalk.log
- /var/log/setuid.today
- /var/log/utx.log
- /var/log/rkhunter.log
- /var/log/userlog
- /var/log/sendmail.st
- /var/log/xferlog
input_type: log
document_type: syslog
-
paths:
- /var/log/bro/current/app_stats.log
input_type: log
document_type: bro_app_stats
-
paths:
- /var/log/bro/current/communication.log
input_type: log
document_type: bro_communication
-
paths:
- /var/log/bro/current/conn.log
input_type: log
document_type: bro_conn
-
paths:
- /var/log/bro/current/dhcp.log
input_type: log
document_type: bro_dhcp
-
paths:
- /var/log/bro/current/dns.log
input_type: log
document_type: bro_dns
-
paths:
- /var/log/bro/current/dpd.log
input_type: log
document_type: bro_dpd
-
paths:
- /var/log/bro/current/files.log
input_type: log
document_type: bro_files
-
paths:
- /var/log/bro/current/ftp.log
input_type: log
document_type: bro_ftp
-
paths:
- /var/log/bro/current/http.log
input_type: log
document_type: bro_http
-
paths:
- /var/log/bro/current/known_certs.log
input_type: log
document_type: bro_app_known_certs
-
paths:
- /var/log/bro/current/known_hosts.log
input_type: log
document_type: bro_known_hosts
-
paths:
- /var/log/bro/current/known_services.log
input_type: log
document_type: bro_known_services
-
paths:
- /var/log/bro/current/notice.log
input_type: log
document_type: bro_notice
-
paths:
- /var/log/bro/current/smtp.log
input_type: log
document_type: bro_smtp
-
paths:
- /var/log/bro/current/software.log
input_type: log
document_type: bro_software
-
paths:
- /var/log/bro/current/ssh.log
input_type: log
document_type: bro_ssh
-
paths:
- /var/log/bro/current/ssl.log
input_type: log
document_type: bro_ssl
-
paths:
- /var/log/bro/current/weird.log
input_type: log
document_type: bro_weird
-
paths:
- /var/log/bro/current/x509.log
input_type: log
document_type: bro_x509
Then on the CentOS ELK server I have 4 configs:
/etc/logstash/conf.d/02-beats-input.conf
input {
beats {
port => 5044
}
}
/etc/logstash/conf.d/10-syslog-filter.conf
filter {
if [type] == "syslog" {
grok {
match => { "message" => "%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{DATA:syslog_program}(?:\[%{POSINT:syslog_pid}\])?: %{GREEDYDATA:syslog_message}" }
add_field => [ "received_at", "%{#timestamp}" ]
add_field => [ "received_from", "%{host}" ]
}
syslog_pri { }
date {
match => [ "syslog_timestamp", "MMM d HH:mm:ss", "MMM dd HH:mm:ss" ]
}
}
}
/etc/logstash/conf.d/20-bro-ids-filter.conf
filter {
# bro_app_stats ######################
if [type] == "bro_app" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<ts_delta>(.*?))\t(?<app>(.*?))\t(?<uniq_hosts>(.*?))\t(?<hits>(.*?))\t(?<bytes>(.*))" ]
}
}
# bro_conn ######################
if [type] == "bro_conn" {
grok {
match => [
"message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<service>(.*?))\t(?<duration>(.*?))\t(?<orig_bytes>(.*?))\t(?<resp_bytes>(.*?))\t(?<conn_state>(.*?))\t(?<local_orig>(.*?))\t(?<missed_bytes>(.*?))\t(?<history>(.*?))\t(?<orig_pkts>(.*?))\t(?<orig_ip_bytes>(.*?))\t(?<resp_pkts>(.*?))\t(?<resp_ip_bytes>(.*?))\t(?<tunnel_parents>(.*?))\t(?<orig_cc>(.*?))\t(?<resp_cc>(.*?))\t(?<sensorname>(.*))",
"message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<service>(.*?))\t(?<duration>(.*?))\t(?<orig_bytes>(.*?))\t(?<resp_bytes>(.*?))\t(?<conn_state>(.*?))\t(?<local_orig>(.*?))\t(?<missed_bytes>(.*?))\t(?<history>(.*?))\t(?<orig_pkts>(.*?))\t(?<orig_ip_bytes>(.*?))\t(?<resp_pkts>(.*?))\t(?<resp_ip_bytes>(.*?))\t(%{NOTSPACE:tunnel_parents})"
]
}
}
# bro_notice ######################
if [type] == "bro_notice" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<fuid>(.*?))\t(?<file_mime_type>(.*?))\t(?<file_desc>(.*?))\t(?<proto>(.*?))\t(?<note>(.*?))\t(?<msg>(.*?))\t(?<sub>(.*?))\t(?<src>(.*?))\t(?<dst>(.*?))\t(?<p>(.*?))\t(?<n>(.*?))\t(?<peer_descr>(.*?))\t(?<actions>(.*?))\t(?<suppress_for>(.*?))\t(?<dropped>(.*?))\t(?<remote_location.country_code>(.*?))\t(?<remote_location.region>(.*?))\t(?<remote_location.city>(.*?))\t(?<remote_location.latitude>(.*?))\t(?<remote_location.longitude>(.*))" ]
}
}
# bro_dhcp ######################
if [type] == "bro_dhcp" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<mac>(.*?))\t(?<assigned_ip>(.*?))\t(?<lease_time>(.*?))\t(?<trans_id>(.*))" ]
}
}
# bro_dns ######################
if [type] == "bro_dns" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<trans_id>(.*?))\t(?<query>(.*?))\t(?<qclass>(.*?))\t(?<qclass_name>(.*?))\t(?<qtype>(.*?))\t(?<qtype_name>(.*?))\t(?<rcode>(.*?))\t(?<rcode_name>(.*?))\t(?<AA>(.*?))\t(?<TC>(.*?))\t(?<RD>(.*?))\t(?<RA>(.*?))\t(?<Z>(.*?))\t(?<answers>(.*?))\t(?<TTLs>(.*?))\t(?<rejected>(.*))" ]
}
}
# bro_software ######################
if [type] == "bro_software" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<host_p>(.*?))\t(?<software_type>(.*?))\t(?<name>(.*?))\t(?<version.major>(.*?))\t(?<version.minor>(.*?))\t(?<version.minor2>(.*?))\t(?<version.minor3>(.*?))\t(?<version.addl>(.*?))\t(?<unparsed_version>(.*))" ]
}
}
# bro_dpd ######################
if [type] == "bro_dpd" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<proto>(.*?))\t(?<analyzer>(.*?))\t(?<failure_reason>(.*))" ]
}
}
# bro_files ######################
if [type] == "bro_files" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<fuid>(.*?))\t(?<tx_hosts>(.*?))\t(?<rx_hosts>(.*?))\t(?<conn_uids>(.*?))\t(?<source>(.*?))\t(?<depth>(.*?))\t(?<analyzers>(.*?))\t(?<mime_type>(.*?))\t(?<filename>(.*?))\t(?<duration>(.*?))\t(?<local_orig>(.*?))\t(?<is_orig>(.*?))\t(?<seen_bytes>(.*?))\t(?<total_bytes>(.*?))\t(?<missing_bytes>(.*?))\t(?<overflow_bytes>(.*?))\t(?<timedout>(.*?))\t(?<parent_fuid>(.*?))\t(?<md5>(.*?))\t(?<sha1>(.*?))\t(?<sha256>(.*?))\t(?<extracted>(.*))" ]
}
}
# bro_http ######################
if [type] == "bro_http" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<trans_depth>(.*?))\t(?<method>(.*?))\t(?<bro_host>(.*?))\t(?<uri>(.*?))\t(?<referrer>(.*?))\t(?<user_agent>(.*?))\t(?<request_body_len>(.*?))\t(?<response_body_len>(.*?))\t(?<status_code>(.*?))\t(?<status_msg>(.*?))\t(?<info_code>(.*?))\t(?<info_msg>(.*?))\t(?<filename>(.*?))\t(?<http_tags>(.*?))\t(?<username>(.*?))\t(?<password>(.*?))\t(?<proxied>(.*?))\t(?<orig_fuids>(.*?))\t(?<orig_mime_types>(.*?))\t(?<resp_fuids>(.*?))\t(?<resp_mime_types>(.*))" ]
}
}
# bro_known_certs ######################
if [type] == "bro_known_certs" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<port_num>(.*?))\t(?<subject>(.*?))\t(?<issuer_subject>(.*?))\t(?<serial>(.*))" ]
}
}
# bro_known_hosts ######################
if [type] == "bro_known_hosts" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*))" ]
}
}
# bro_known_services ######################
if [type] == "bro_known_services" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<bro_host>(.*?))\t(?<port_num>(.*?))\t(?<port_proto>(.*?))\t(?<service>(.*))" ]
}
}
# bro_ssh ######################
if [type] == "bro_ssh" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<status>(.*?))\t(?<direction>(.*?))\t(?<client>(.*?))\t(?<server>(.*?))\t(?<remote_location.country_code>(.*?))\t(?<remote_location.region>(.*?))\t(?<remote_location.city>(.*?))\t(?<remote_location.latitude>(.*?))\t(?<remote_location.longitude>(.*))" ]
}
}
# bro_ssl ######################
if [type] == "bro_ssl" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<version>(.*?))\t(?<cipher>(.*?))\t(?<server_name>(.*?))\t(?<session_id>(.*?))\t(?<subject>(.*?))\t(?<issuer_subject>(.*?))\t(?<not_valid_before>(.*?))\t(?<not_valid_after>(.*?))\t(?<last_alert>(.*?))\t(?<client_subject>(.*?))\t(?<client_issuer_subject>(.*?))\t(?<cert_hash>(.*?))\t(?<validation_status>(.*))" ]
}
}
# bro_weird ######################
if [type] == "bro_weird" {
grok {
match => [ "message", "(?<ts>(.*?))\t(?<uid>(.*?))\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t(?<name>(.*?))\t(?<addl>(.*?))\t(?<notice>(.*?))\t(?<peer>(.*))" ]
}
}
# bro_x509 #######################
if [type] == "bro_x509" {
csv {
#x509.log:#fields ts id certificate.version certificate.serial certificate.subject certificate.issuer certificate.not_valid_before certificate.not_valid_after certificate.key_alg certificate.sig_alg certificate.key_type certificate.key_length certificate.exponent certificate.curve san.dns san.uri san.email san.ip basic_constraints.ca basic_constraints.path_len
columns => ["ts","id","certificate.version","certificate.serial","certificate.subject","icertificate.issuer","certificate.not_valid_before","certificate.not_valid_after","certificate.key_alg","certificate.sig_alg","certificate.key_type","certificate.key_length","certificate.exponent","certificate.curve","san.dns","san.uri","san.email","san.ip","basic_constraints.ca","basic_constraints.path_len"]
#If you use a custom delimiter, change the following value in between the quotes to your delimiter. Otherwise, leave the next line alone.
separator => " "
}
#Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively
date {
match => [ "ts", "UNIX" ]
}
}
if [type]== "bro_intel" {
grok {
match => [ "message", "(?<ts>(.*?))\t%{DATA:uid}\t(?<id.orig_h>(.*?))\t(?<id.orig_p>(.*?))\t(?<id.resp_h>(.*?))\t(?<id.resp_p>(.*?))\t%{DATA:fuid}\t%{DATA:file_mime_type}\t%{DATA:file_desc}\t(?<seen.indicator>(.*?))\t(?<seen.indicator_type>(.*?))\t(?<seen.where>(.*?))\t%{NOTSPACE:sources}" ]
}
}
}
date {
match => [ "ts", "UNIX" ]
}
}
filter {
if "bro" in [type] {
if [id.orig_h] {
mutate {
add_field => [ "senderbase_lookup", "http://www.senderbase.org/lookup/?search_string=%{id.orig_h}" ]
add_field => [ "CBL_lookup", "http://cbl.abuseat.org/lookup.cgi?ip=%{id.orig_h}" ]
add_field => [ "Spamhaus_lookup", "http://www.spamhaus.org/query/bl?ip=%{id.orig_h}" ]
}
}
mutate {
add_tag => [ "bro" ]
}
mutate {
convert => [ "id.orig_p", "integer" ]
convert => [ "id.resp_p", "integer" ]
convert => [ "orig_bytes", "integer" ]
convert => [ "resp_bytes", "integer" ]
convert => [ "missed_bytes", "integer" ]
convert => [ "orig_pkts", "integer" ]
convert => [ "orig_ip_bytes", "integer" ]
convert => [ "resp_pkts", "integer" ]
convert => [ "resp_ip_bytes", "integer" ]
}
}
}
filter {
if [type] == "bro_conn" {
#The following makes use of the translate filter (stash contrib) to convert conn_state into human text. Saves having to look up values for packet introspection
translate {
field => "conn_state"
destination => "conn_state_full"
dictionary => [
"S0", "Connection attempt seen, no reply",
"S1", "Connection established, not terminated",
"S2", "Connection established and close attempt by originator seen (but no reply from responder)",
"S3", "Connection established and close attempt by responder seen (but no reply from originator)",
"SF", "Normal SYN/FIN completion",
"REJ", "Connection attempt rejected",
"RSTO", "Connection established, originator aborted (sent a RST)",
"RSTR", "Established, responder aborted",
"RSTOS0", "Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder",
"RSTRH", "Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator",
"SH", "Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)",
"SHR", "Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator",
"OTH", "No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)"
]
}
}
}
# Resolve @source_host to FQDN if possible, if missing, for some types of logging, using source_host_ip from above
filter {
if [id.orig_h] {
if ![id.orig_h-resolved] {
mutate {
add_field => [ "id.orig_h-resolved", "%{id.orig_h}" ]
}
dns {
reverse => [ "id.orig_h-resolved" ]
action => "replace"
}
}
}
}
filter {
if [id.resp_h] {
if ![id.resp_h-resolved] {
mutate {
add_field => [ "id.resp_h-resolved", "%{id.resp_h}" ]
}
dns {
reverse => [ "id.resp_h-resolved" ]
action => "replace"
}
}
}
}
and /etc/logstash/conf.d/30-elasticsearch-output.conf
output {
elasticsearch {
hosts => ["localhost:9200"]
manage_template => false
index => "%{[#metadata][beat]}-%{+YYYY.MM.dd}"
document_type => "%{[#metadata][type]}"
}
}
I've leveraged this gist and tailored it to my configuration. While running I get the following error in /var/log/logstash/logstash-plain.log:
[2016-11-06T15:30:36,961][ERROR][logstash.agent ] ########\n\t if [type] == \"bro_dhcp\" {\n\t\tgrok { \n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<mac>(.*?))\\t(?<assigned_ip>(.*?))\\t(?<lease_time>(.*?))\\t(?<trans_id>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_dns ######################\n\t if [type] == \"bro_dns\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<proto>(.*?))\\t(?<trans_id>(.*?))\\t(?<query>(.*?))\\t(?<qclass>(.*?))\\t(?<qclass_name>(.*?))\\t(?<qtype>(.*?))\\t(?<qtype_name>(.*?))\\t(?<rcode>(.*?))\\t(?<rcode_name>(.*?))\\t(?<AA>(.*?))\\t(?<TC>(.*?))\\t(?<RD>(.*?))\\t(?<RA>(.*?))\\t(?<Z>(.*?))\\t(?<answers>(.*?))\\t(?<TTLs>(.*?))\\t(?<rejected>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_software ######################\n\t if [type] == \"bro_software\" {\n\t\tgrok { \n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<bro_host>(.*?))\\t(?<host_p>(.*?))\\t(?<software_type>(.*?))\\t(?<name>(.*?))\\t(?<version.major>(.*?))\\t(?<version.minor>(.*?))\\t(?<version.minor2>(.*?))\\t(?<version.minor3>(.*?))\\t(?<version.addl>(.*?))\\t(?<unparsed_version>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_dpd ######################\n\t if [type] == \"bro_dpd\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<proto>(.*?))\\t(?<analyzer>(.*?))\\t(?<failure_reason>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_files ######################\n\t if [type] == \"bro_files\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<fuid>(.*?))\\t(?<tx_hosts>(.*?))\\t(?<rx_hosts>(.*?))\\t(?<conn_uids>(.*?))\\t(?<source>(.*?))\\t(?<depth>(.*?))\\t(?<analyzers>(.*?))\\t(?<mime_type>(.*?))\\t(?<filename>(.*?))\\t(?<duration>(.*?))\\t(?<local_orig>(.*?))\\t(?<is_orig>(.*?))\\t(?<seen_bytes>(.*?))\\t(?<total_bytes>(.*?))\\t(?<missing_bytes>(.*?))\\t(?<overflow_bytes>(.*?))\\t(?<timedout>(.*?))\\t(?<parent_fuid>(.*?))\\t(?<md5>(.*?))\\t(?<sha1>(.*?))\\t(?<sha256>(.*?))\\t(?<extracted>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_http ######################\n\t if [type] == \"bro_http\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<trans_depth>(.*?))\\t(?<method>(.*?))\\t(?<bro_host>(.*?))\\t(?<uri>(.*?))\\t(?<referrer>(.*?))\\t(?<user_agent>(.*?))\\t(?<request_body_len>(.*?))\\t(?<response_body_len>(.*?))\\t(?<status_code>(.*?))\\t(?<status_msg>(.*?))\\t(?<info_code>(.*?))\\t(?<info_msg>(.*?))\\t(?<filename>(.*?))\\t(?<http_tags>(.*?))\\t(?<username>(.*?))\\t(?<password>(.*?))\\t(?<proxied>(.*?))\\t(?<orig_fuids>(.*?))\\t(?<orig_mime_types>(.*?))\\t(?<resp_fuids>(.*?))\\t(?<resp_mime_types>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_known_certs ######################\n\t if [type] == \"bro_known_certs\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<bro_host>(.*?))\\t(?<port_num>(.*?))\\t(?<subject>(.*?))\\t(?<issuer_subject>(.*?))\\t(?<serial>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_known_hosts ######################\n\t if [type] == \"bro_known_hosts\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<bro_host>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_known_services ######################\n\t if [type] == \"bro_known_services\" {\n\t\tgrok {\n\t\t match => [ \"message\", 
\"(?<ts>(.*?))\\t(?<bro_host>(.*?))\\t(?<port_num>(.*?))\\t(?<port_proto>(.*?))\\t(?<service>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_ssh ######################\n\t if [type] == \"bro_ssh\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<status>(.*?))\\t(?<direction>(.*?))\\t(?<client>(.*?))\\t(?<server>(.*?))\\t(?<remote_location.country_code>(.*?))\\t(?<remote_location.region>(.*?))\\t(?<remote_location.city>(.*?))\\t(?<remote_location.latitude>(.*?))\\t(?<remote_location.longitude>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_ssl ######################\n\t if [type] == \"bro_ssl\" {\n\t\tgrok {\n\t\t match => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<version>(.*?))\\t(?<cipher>(.*?))\\t(?<server_name>(.*?))\\t(?<session_id>(.*?))\\t(?<subject>(.*?))\\t(?<issuer_subject>(.*?))\\t(?<not_valid_before>(.*?))\\t(?<not_valid_after>(.*?))\\t(?<last_alert>(.*?))\\t(?<client_subject>(.*?))\\t(?<client_issuer_subject>(.*?))\\t(?<cert_hash>(.*?))\\t(?<validation_status>(.*))\" ]\n\t\t}\n\t }\n\n\t# bro_weird ######################\n\tif [type] == \"bro_weird\" {\n\t\tgrok {\n\t\t\tmatch => [ \"message\", \"(?<ts>(.*?))\\t(?<uid>(.*?))\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t(?<name>(.*?))\\t(?<addl>(.*?))\\t(?<notice>(.*?))\\t(?<peer>(.*))\" ]\n\t\t\t}\n\t}\n\t\n\t# bro_x509 #######################\n\tif [type] == \"bro_x509\" {\n\t\tcsv {\n\t\n\t\t #x509.log:#fields\tts\tid\tcertificate.version\tcertificate.serial\tcertificate.subject\tcertificate.issuer\tcertificate.not_valid_before\tcertificate.not_valid_after\tcertificate.key_alg\tcertificate.sig_alg\tcertificate.key_type\tcertificate.key_length\tcertificate.exponent\tcertificate.curve\tsan.dns\tsan.uri\tsan.email\tsan.ip\tbasic_constraints.ca\tbasic_constraints.path_len\n\t\t columns => [\"ts\",\"id\",\"certificate.version\",\"certificate.serial\",\"certificate.subject\",\"icertificate.issuer\",\"certificate.not_valid_before\",\"certificate.not_valid_after\",\"certificate.key_alg\",\"certificate.sig_alg\",\"certificate.key_type\",\"certificate.key_length\",\"certificate.exponent\",\"certificate.curve\",\"san.dns\",\"san.uri\",\"san.email\",\"san.ip\",\"basic_constraints.ca\",\"basic_constraints.path_len\"]\n\t\n\t\t #If you use a custom delimiter, change the following value in between the quotes to your delimiter. 
Otherwise, leave the next line alone.\n\t\t separator => \"\t\"\n\t\t}\n\t\n\t\t#Let's convert our timestamp into the 'ts' field, so we can use Kibana features natively\n\t\tdate {\n\t\t match => [ \"ts\", \"UNIX\" ]\n\t\t}\n\t\n\t }\n\t\n\tif [type]== \"bro_intel\" {\n\t grok {\n\t\tmatch => [ \"message\", \"(?<ts>(.*?))\\t%{DATA:uid}\\t(?<id.orig_h>(.*?))\\t(?<id.orig_p>(.*?))\\t(?<id.resp_h>(.*?))\\t(?<id.resp_p>(.*?))\\t%{DATA:fuid}\\t%{DATA:file_mime_type}\\t%{DATA:file_desc}\\t(?<seen.indicator>(.*?))\\t(?<seen.indicator_type>(.*?))\\t(?<seen.where>(.*?))\\t%{NOTSPACE:sources}\" ]\n\t }\n }\n }\n date {\n\tmatch => [ \"ts\", \"UNIX\" ]\n }\n}\n\nfilter {\n if \"bro\" in [type] {\n\tif [id.orig_h] {\n\t mutate {\n\t\tadd_field => [ \"senderbase_lookup\", \"http://www.senderbase.org/lookup/?search_string=%{id.orig_h}\" ]\n\t\tadd_field => [ \"CBL_lookup\", \"http://cbl.abuseat.org/lookup.cgi?ip=%{id.orig_h}\" ]\n\t\tadd_field => [ \"Spamhaus_lookup\", \"http://www.spamhaus.org/query/bl?ip=%{id.orig_h}\" ]\n\t }\n\t}\n\tmutate {\n\t add_tag => [ \"bro\" ]\n\t}\n\tmutate {\n\t convert => [ \"id.orig_p\", \"integer\" ]\n\t convert => [ \"id.resp_p\", \"integer\" ]\n\t convert => [ \"orig_bytes\", \"integer\" ]\n\t convert => [ \"resp_bytes\", \"integer\" ]\n\t convert => [ \"missed_bytes\", \"integer\" ]\n\t convert => [ \"orig_pkts\", \"integer\" ]\n\t convert => [ \"orig_ip_bytes\", \"integer\" ]\n\t convert => [ \"resp_pkts\", \"integer\" ]\n\t convert => [ \"resp_ip_bytes\", \"integer\" ]\n\t}\n }\n}\n\nfilter {\n if [type] == \"bro_conn\" {\n\t#The following makes use of the translate filter (stash contrib) to convert conn_state into human text. Saves having to look up values for packet introspection\n\ttranslate {\n\t field => \"conn_state\"\n\t destination => \"conn_state_full\"\n\t dictionary => [ \n\t\t\"S0\", \"Connection attempt seen, no reply\",\n\t\t\"S1\", \"Connection established, not terminated\",\n\t\t\"S2\", \"Connection established and close attempt by originator seen (but no reply from responder)\",\n\t\t\"S3\", \"Connection established and close attempt by responder seen (but no reply from originator)\",\n\t\t\"SF\", \"Normal SYN/FIN completion\",\n\t\t\"REJ\", \"Connection attempt rejected\",\n\t\t\"RSTO\", \"Connection established, originator aborted (sent a RST)\",\n\t\t\"RSTR\", \"Established, responder aborted\",\n\t\t\"RSTOS0\", \"Originator sent a SYN followed by a RST, we never saw a SYN-ACK from the responder\",\n\t\t\"RSTRH\", \"Responder sent a SYN ACK followed by a RST, we never saw a SYN from the (purported) originator\",\n\t\t\"SH\", \"Originator sent a SYN followed by a FIN, we never saw a SYN ACK from the responder (hence the connection was 'half' open)\",\n\t\t\"SHR\", \"Responder sent a SYN ACK followed by a FIN, we never saw a SYN from the originator\",\n\t\t\"OTH\", \"No SYN seen, just midstream traffic (a 'partial connection' that was not later closed)\" \n\t ]\n\t}\n }\n}\n# Resolve #source_host to FQDN if possible if missing for some types of ging using source_host_ip from above\nfilter {\n if [id.orig_h] {\n\tif ![id.orig_h-resolved] {\n\t mutate {\n\t\tadd_field => [ \"id.orig_h-resolved\", \"%{id.orig_h}\" ]\n\t }\n\t dns {\n\t\treverse => [ \"id.orig_h-resolved\" ]\n\t\taction => \"replace\"\n\t }\n\t}\n }\n}\nfilter {\n if [id.resp_h] {\n\tif ![id.resp_h-resolved] {\n\t mutate {\n\t\tadd_field => [ \"id.resp_h-resolved\", \"%{id.resp_h}\" ]\n\t }\n\t dns {\n\t\treverse => [ \"id.resp_h-resolved\" ]\n\t\taction => \"replace\"\n\t }\n\t}\n 
}\n}\n\noutput {\n elasticsearch {\n hosts => [\"localhost:9200\"]\n #sniffing => true\n manage_template => false\n index => \"%{[#metadata][beat]}-%{+YYYY.MM.dd}\"\n document_type => \"%{[#metadata][type]}\"\n }\n}\n\n", :reason=>"Expected one of #, input, filter, output at line 158, column 3 (byte 8746) after "}
To the best of my ability I've reviewed my Logstash configuration and I can't see any errors. Can anyone help me figure out what's wrong with it?
I'm running
logstash.noarch 1:5.0.0-1 @elasticsearch
elasticsearch.noarch 5.0.0-1 @elasticsearch
Many thanks
If you match the open curly brace at the top of 20-bro-ids-filter.conf, you'll see it matches with close curly brace just before your date{} stanza. That puts date{} outside the filter{}, generating the message that it's expecting input{}, output{}, or filter{}.
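In other words, removing that stray closing brace so the trailing date{} stays inside filter{} should clear the parse error; the tail of 20-bro-ids-filter.conf would then end roughly like this (a sketch, with the grok pattern elided):
if [type] == "bro_intel" {
grok {
match => [ "message", "..." ]
}
}
# date{} now remains inside the enclosing filter{} block
date {
match => [ "ts", "UNIX" ]
}
}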

Filtering/Visualising JMX fields

I am successfully able to integrate the JMX plugin with Logstash. Now I am trying to visualize the JMX data.
For this I am trying to add custom fields to the parsed JMX data.
Example:
input{
beats{
port => 27080
congestion_threshold => 1500
}
jmx {
path => "file://Machine01/Users/username/projects/Logstash/logstash/bin/jmx"
polling_frequency => 15
type => "jmx"
nb_thread => 4
}
}
filter {
if [type] == "Type1"{
grok{
break_on_match => false
patterns_dir => ["C:\Users\users\projects\Logstash\logstash\bin\patterns"]
match => { "message" => "%{YEAR:Year}%{MONTHNUM:Month}%{MONTHDAY:Day} %{HOUR:Hour}%{MINUTE:Minute}%{SECOND:Second} %{LogLevel:LogVerbosity} %{MODULE:MODULENAME}%{SPACE}%{MESSAGEID:MESSAGEID} %{SUBMODULE:SUBMODULE} %{MESSAGE:MESSAGE}"}
add_field => [ "received_at", "%{#timestamp}" ]
add_field => [ "received_from", "%{host}" ]
add_tag => ["Groked"]
}
if "_grokparsefailure" in [tags] {
drop { }
}
if [type] == "jmx" {
if ("OperatingSystem.ProcessCpuLoad" in [metric_path] or "OperatingSystem.SystemCpuLoad" in [metric_path]) {
ruby {
code => "event['cpuLoad'] = event['metric_value_number'] * 100"
add_tag => [ "cpuLoad" ]
}
}
}
}
}
output {
if [type] == "jmx" {
elasticsearch {
hosts => ["http://localhost:9200"]
index => "jmx"
}
} else {
elasticsearch {
hosts => ["http://localhost:9200"]
manage_template => true
index => "%{[#metadata][beat]}-%{+YYYY.MM.dd}"
document_type => "%{[#metadata][type]}"
}
}
}
But Kibana is not displaying any such newly added field; this is the data I am getting in Kibana:
@version:1
@timestamp:May 30th 2016, 18:50:36.622
host:host
path:file://Machine01/Users/username/projects/Logstash/logstash/bin/jmx
type:jmx
metric_path:OperatingSystem.ProcessCpuLoad
metric_value_number:0.003
_id:AVUB0r_4sUXN-4lFtxGq
_type:jmx
_index:jmx _score:
How can I change this to add the new field which I have defined in the filter?
Also, is there a better way to visualise JMX data in Kibana?
