Has anyone ever added a conditional to an input? I'm using various versions of the beats plugin; versions less than 7.11 don't have @metadata. I'd like to use two different indices conditionally. For example:
beats {
  port => "5000"
  codec => "plain"
  ssl => true
  ssl_certificate_authorities => ["/etc/pki/logstash/logstashCA.pem"]
  ssl_certificate => "/etc/pki/logstash/logstashCA.pem"
  ssl_key => "/etc/pki/logstash/logstashCA.p8"
  ssl_verify_mode => "force_peer"
  if [version] not in [beat] {
    add_field => { "target_index" => "%{[@metadata][beat]}-%{[beat]}-7-%{+YYYY.MM.dd}" }
  }
  add_field => { "target_index" => "%{[@metadata][beat]}-%{[beat][version]}-%{+YYYY.MM.dd}" }
}
No, you cannot have a conditional based on fields of the event in an input, because at the time the input is built no events exist. However, you can do it in the filter section:
if [beat][version] {
  mutate {
    add_field => { "target_index" => "%{[@metadata][beat]}-%{[beat][version]}-%{+YYYY.MM.dd}" }
  }
} else {
  mutate {
    add_field => { "target_index" => "%{[@metadata][beat]}-%{[beat]}-7-%{+YYYY.MM.dd}" }
  }
}
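To route on that field, the elasticsearch output can reference it with the same sprintf syntax. A minimal sketch (my addition, not from the original answer; the host is an assumption):

output {
  elasticsearch {
    hosts => "localhost:9200"
    # target_index was added by the mutate filters above
    index => "%{target_index}"
  }
}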
I am using Logstash to update by query existing Elasticsearch documents with an additional field that contains aggregate values extracted from a PostgreSQL table.
I use the elasticsearch output to load one index using document_id, and the http output to update another index that has a different document_id, but I am receiving errors:
[2023-02-08T17:58:12,086][ERROR][logstash.outputs.http ][main][b64f19821b11ee0df1bd165920785876cd6c5fab079e27d39bb7ee19a3d642a4] [HTTP Output Failure] Encountered non-2xx HTTP code 400 {:response_code=>400, :url=>"http://localhost:9200/medico/_update_by_query", :event=>#LogStash::Event:0x19a14c08}
This is my pipeline configuration:
input {
  jdbc {
    # Postgres jdbc connection string to our database, mydb
    jdbc_connection_string => "jdbc:postgresql://handel:5432/mydb"
    statement_filepath => "D:\ProgrammiUnsupported\logstash-7.15.2\config\nota_sede.sql"
  }
}
filter {
  aggregate {
    task_id => "%{idCso}"
    code => "
      map['idCso'] = event.get('idCso')
      map['noteSede'] ||= []
      map['noteSede'] << {
        'id' => event.get('idNota'),
        'tipo' => event.get('tipoNota'),
        'descrizione' => event.get('descrizione'),
        'data' => event.get('data'),
        'dataInizio' => event.get('dataInizio'),
        'dataFine' => event.get('dataFine')
      }
      event.cancel()"
    push_previous_map_as_event => true
    timeout => 60
    timeout_tags => ['_aggregatetimeout']
  }
}
output {
  stdout { codec => rubydebug { metadata => true } }
  # this works
  elasticsearch {
    hosts => "https://localhost:9200"
    document_id => "STRUTTURA_%{idCso}"
    index => "struttura"
    action => "update"
    user => "user"
    password => "password"
    ssl => true
    cacert => "/usr/share/logstash/config/ca.crt"
  }
  http {
    url => "http://localhost:9200/medico/_update_by_query"
    user => "elastic"
    password => "changeme"
    http_method => "post"
    format => "message"
    content_type => "application/json"
    message => '{
      "query":{
        "term":{
          "idCso":"%{idCso}"
        }
      },
      "script":{
        "source":"ctx._source.noteSede=params.noteSede",
        "lang":"painless",
        "params":{
          "noteSede":"%{noteSede}"
        }
      }
    }'
  }
}
The stdout output shows me the documents sent to the outputs, like this:
{
  "query" => {
    "term" => {
      "idCso" => "859119"
    }
  },
  "script" => {
    "source" => "ctx._source.noteSede=params.noteSede",
    "lang" => "painless",
    "params" => {
      "noteSede" => "{dataFine=null, dataInizio=2020-02-13, descrizione=?, tipo=DB, id=6390644, data=2020-02-13 12:26:58.409},{dataFine=null, dataInizio=2020-02-13, descrizione=?, tipo=DE, id=6390645, data=2020-02-13 12:26:58.41}"
    }
  }
}
How can I set the noteSede array field in the message sent to _update_by_query?
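One possible approach (a sketch, not from the thread; the ruby filter step and the noteSede_json field name are my assumptions) is to serialize the array into a JSON string before the output, so that the sprintf reference expands to valid JSON instead of a Ruby hash dump:

filter {
  ruby {
    # Serialize the aggregated array to a JSON string; the http output's
    # sprintf reference can then embed it as-is.
    code => "
      require 'json'
      event.set('noteSede_json', event.get('noteSede').to_json)
    "
  }
}

In the http output's message, the params entry would then become "noteSede": %{noteSede_json} (without surrounding quotes, since the value is already JSON).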
Hello, I have this configuration for a Logstash instance running on my computer:
input {
  exec {
    command => "powershell -executionpolicy unrestricted -f scripts/windows/process.ps1 command logstash"
    interval => 30
    type => "process_data"
    codec => line
    tags => [ "logstash" ]
  }
}
output {
  if "sometype-logs" in [tags] {
    elasticsearch {
      action => "index"
      doc_as_upsert => true
      index => "sometype-logs-%{+YYYY.MM.dd}"
      hosts => "localhost:9200"
      template_overwrite => true
    }
  } else {
    elasticsearch {
      action => "index"
      doc_as_upsert => true
      index => "%{type}"
      hosts => "localhost:9200"
      template_overwrite => true
    }
  }
}
When displaying the indexes, I see an index literally named "%{type}".
Why is the index name "%{type}" and not "process_data"?
Probably just something about syntax. To use a field of the data, you must use this syntax:
%{[somefield]}
(see the examples on this documentation page)
So, in your case, try this:
"%{[type]}"
in place of
"%{type}"
I am absolutely new to Logstash and I am trying to parse my multiline log entries, which are in the following format:
<log level="INFO" time="Wed May 03 08:25:03 CEST 2017" timel="1493792703368" host="host">
<msg><![CDATA[Method=GET URL=http://localhost (Vers=[Version], Param1=[param1], Param2=[param1]) Result(Content-Length=[22222], Content-Type=[text/xml; charset=utf-8]) Status=200 Times=TISP:1098/CSI:-/Me:1/Total:1099]]>
</msg>
</log>
Do you know how to implement a filter in the Logstash config to be able to index the following fields in Elasticsearch:
time, host, Vers, Param1, Param2, TISP
Thank you very much
OK, I found out how to do it. This is my pipeline.conf file, and it works:
input {
  beats {
    port => 5044
  }
}
filter {
  xml {
    store_xml => false
    source => "message"
    xpath => [
      "/log/@level", "level",
      "/log/@time", "time",
      "/log/@timel", "unixtime",
      "/log/@host", "host_org",
      "/log/@msg", "msg",
      "/log/msg/text()", "msg_txt"
    ]
  }
  grok {
    break_on_match => false
    match => ["msg_txt", "Param1=\[(?<param1>-?\w+)\]"]
    match => ["msg_txt", "Param2=\[(?<param2>-?\w+)\]"]
    match => ["msg_txt", "Vers=\[(?<vers>-?\d+\.\d+)\]"]
    match => ["msg_txt", "TISP:(?<tisp>-?\d+)"]
    match => ["unixtime", "(?<customTime>-?\d+)"]
  }
if "_grokparsefailure" in [tags] {
drop { }
}
mutate {
convert => { "tisp" => "integer" }
}
date {
match => [ "customTime", "UNIX_MS"]
target => "#timestamp"
}
if "_dateparsefailure" in [tags] {
drop { }
}
}
output {
  elasticsearch {
    hosts => "elasticsearch:9200"
    user => "user"
    password => "passwd"
  }
}
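As a side note (my addition, not from the thread): you can dry-run the filter block above without Beats or Elasticsearch by swapping in a generator input and a rubydebug output. The sample values in the message are made up, but shaped so that every grok pattern matches:

input {
  generator {
    count => 1
    message => '<log level="INFO" time="Wed May 03 08:25:03 CEST 2017" timel="1493792703368" host="host"><msg><![CDATA[Method=GET URL=http://localhost (Vers=[1.0], Param1=[a], Param2=[b]) Status=200 Times=TISP:1098/CSI:-/Me:1/Total:1099]]></msg></log>'
  }
}
output {
  stdout { codec => rubydebug }
}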
I want to make a filter in Logstash (version 2.4) with different matches in the same grok.
I would like to add different tags depending on the match.
Basically, I receive three different message patterns:
"##MAGIC##%message"
"##REAL##%message"
"%message"
What I am trying to do is:
grok {
  match => {"message" => "##MAGIC##%{GREEDYDATA:magic_message}"}
  match => {"message" => "##REAL##%{GREEDYDATA:real_message}"}
  match => {"message" => "%{GREEDYDATA:basic_message}"}
  if [magic_message] {
    overwrite => [ "message" ]
    add_tag => ["Magic"]
  } else if [real_message] {
    overwrite => [ "message" ]
    add_tag => ["Real"]
  } else {
    overwrite => [ "message" ]
    add_tag => ["Basic"]
  }
But I got this compile failure:
The given configuration is invalid. Reason: Expected one of #, => at line 34, column 9 (byte 900) after filter {
grok {
match => {"message" => "##MAGIC##%{GREEDYDATA:magic_message}"}
match => {"message" => "##REAL##%{GREEDYDATA:real_message}"}
match => {"message" => "%{GREEDYDATA:basic_message}"}
if {:level=>:fatal}
The Logstash configuration syntax does not work like this.
This should work better (under the assumption that you want to replace message with magic_message/real_message):
grok {
  match => { "message" => [ "##MAGIC##%{GREEDYDATA:magic_message}",
                            "##REAL##%{GREEDYDATA:real_message}",
                            "%{GREEDYDATA:basic_message}" ] }
}
if [magic_message] {
  mutate {
    replace => { "message" => "%{magic_message}" }
    add_tag => ["Magic"]
  }
} else if [real_message] {
  mutate {
    replace => { "message" => "%{real_message}" }
    add_tag => ["Real"]
  }
} else {
  mutate {
    add_tag => ["Basic"]
  }
}
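One detail that makes this work (my note, not from the original answer): grok's break_on_match option defaults to true, so the patterns in the array are tried in order and grok stops at the first one that matches. At most one of magic_message, real_message, and basic_message is therefore set on any event, which is what makes the if / else if / else chain unambiguous.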
Here is an example of an event message:
{
  "timestamp":"2016-03-29T22:35:44.770750-0400",
  "flow_id":45385792,
  "in_iface":"eth1",
  "event_type":"alert",
  "src_ip":"3.3.3.8",
  "src_port":21,
  "dest_ip":"2.2.2.2",
  "dest_port":52934,
  "proto":"TCP",
  "alert":{
    "action":"allowed",
    "gid":1,
    "signature_id":4027,
    "rev":0,
    "signature":"FTP Successful Login",
    "category":"",
    "severity":3
  },
  "payload":"MjU3ICIvaG9tZS9uZXd1c2VyIg0K",
  "payload_printable":"257 newuser",
  "stream":0,
  "packet":"AFBWo0NoAFBWoxZWCABFAABJKDpAAEAGCGcDAwMIAgICAgAVzsbd4MhqOBOjfoAYAOMYcwAAAQEIChHN4EQHnwugMjU3ICIvaG9tZS9uZXd1c2VyIg0K"
}
I'd like to be able to identify the string "newuser" (it always comes after the number "257"), create another field named user, and add the "newuser" string to it.
My Logstash config file is the following:
input {
  beats {
    port => 5044
    codec => json
    type => "SuricataIDPS"
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    sniffing => true
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    #document_type => "%{[@metadata][type]}"
  }
}
How can I extract the "newuser" string and add a new field with that value?
You have to define a filter stanza in your config with a grok filter; see below:
input {
  beats {
    port => 5044
    codec => json
    type => "SuricataIDPS"
  }
}
filter {
  grok {
    match => ["payload_printable", "257 (?<user>.+)"]
    tag_on_failure => []
  }
}
output {
  elasticsearch {
    hosts => ["localhost:9200"]
    sniffing => true
    manage_template => false
    index => "%{[@metadata][beat]}-%{+YYYY.MM.dd}"
    #document_type => "%{[@metadata][type]}"
  }
}
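Two notes on this (mine, not from the thread): the pattern 257 (?<user>.+) captures everything after "257 " into a new user field, so for the sample event above user would be set to "newuser"; and tag_on_failure => [] keeps events whose payload_printable does not contain that pattern from being tagged with _grokparsefailure.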