Logstash configuration for word extraction

I am new to Logstash and have no idea how to do the following. I have sample data like this:
Column: Type
Incident Response P3
Incident Resolution L1.5 P2
...
I want to extract the words 'Response' and 'Resolution' into a new column 'SLA_Type'.
I'm looking for something similar to the SQL statement below:
case when Type like '%Resolution%' then 'Resolution'
     when Type like '%Response%' then 'Response'
end as SLA_Type
How do I do this in Logstash?
Below is my config; I'm using an API (http_poller) input.
input {
  http_poller {
    urls => {
      snowinc => {
        url => "https://service-now.com"
        user => "your_user"
        password => "yourpassword"
        headers => { Accept => "application/json" }
      }
    }
    request_timeout => 60
    metadata_target => "http_poller_metadata"
    schedule => { cron => "* * * * * UTC" }
    codec => "json"
  }
}
filter {
  json { source => "result" }
  split { field => ["result"] }
  date {
    match => ["[result][sys_created_on]", "yyyy-MM-dd HH:mm:ss"]
    target => "sys_created_on"
  }
}
output {
  elasticsearch {
    hosts => ["yourelasticIP"]
    index => "incidentsnow"
    action => "update"
    document_id => "%{[result][number]}"
    doc_as_upsert => true
  }
  stdout { codec => rubydebug }
}
The JSON returned by the API looks like this:
{
  "result": [
    {
      "made_sla": "true",
      "Type": "incident resolution p3",
      "sys_updated_on": "2019-12-23 05:00:00",
      "number": "INC0010275",
      "category": "Network"
    },
    {
      "made_sla": "true",
      "Type": "incident resolution l1.5 p4",
      "sys_updated_on": "2019-12-24 07:00:00",
      "number": "INC0010567",
      "category": "DB"
    }
  ]
}

You can use the following filter block in your pipeline to add a new field if a word is present in another field.
if "response" in [Type] {
  mutate {
    add_field => { "SLA_Type" => "Response" }
  }
}
if "resolution" in [Type] {
  mutate {
    add_field => { "SLA_Type" => "Resolution" }
  }
}
If the word response is present in the field Type, a new field named SLA_Type with the value Response will be added to your document; the same will happen with resolution.
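Note that these conditionals check a top-level [Type] field; in your pipeline the API fields appear to stay nested under result after the split, so you may need to reference [result][Type] instead. A minimal sketch of how the conditionals could slot into your existing filter block under that assumption (the checks are case-sensitive substring matches, which works here because the sample Type values are lowercase):
filter {
  json { source => "result" }
  split { field => ["result"] }
  date {
    match => ["[result][sys_created_on]", "yyyy-MM-dd HH:mm:ss"]
    target => "sys_created_on"
  }
  # Case-sensitive substring checks against the nested field
  if "response" in [result][Type] {
    mutate { add_field => { "SLA_Type" => "Response" } }
  } else if "resolution" in [result][Type] {
    mutate { add_field => { "SLA_Type" => "Resolution" } }
  }
}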

Related

How to map array inside message in Logstash HTTP Output

I am using Logstash to update-by-query existing Elasticsearch documents with an additional field that contains aggregate values extracted from a PostgreSQL table.
I use the elasticsearch output to load one index using document_id, and the http output to update another index that has a different document_id, but I am receiving errors:
[2023-02-08T17:58:12,086][ERROR][logstash.outputs.http ][main][b64f19821b11ee0df1bd165920785876cd6c5fab079e27d39bb7ee19a3d642a4] [HTTP Output Failure] Encountered non-2xx HTTP code 400 {:response_code=>400, :url=>"http://localhost:9200/medico/_update_by_query", :event=>#LogStash::Event:0x19a14c08}
This is my pipeline configuration:
input {
  jdbc {
    # Postgres jdbc connection string to our database, mydb
    jdbc_connection_string => "jdbc:postgresql://handel:5432/mydb"
    statement_filepath => "D:\ProgrammiUnsupported\logstash-7.15.2\config\nota_sede.sql"
  }
}
filter {
  aggregate {
    task_id => "%{idCso}"
    code => "
      map['idCso'] = event.get('idCso')
      map['noteSede'] ||= []
      map['noteSede'] << {
        'id' => event.get('idNota'),
        'tipo' => event.get('tipoNota'),
        'descrizione' => event.get('descrizione'),
        'data' => event.get('data'),
        'dataInizio' => event.get('dataInizio'),
        'dataFine' => event.get('dataFine')
      }
      event.cancel()"
    push_previous_map_as_event => true
    timeout => 60
    timeout_tags => ['_aggregatetimeout']
  }
}
output {
  stdout { codec => rubydebug { metadata => true } }
  # this works
  elasticsearch {
    hosts => "https://localhost:9200"
    document_id => "STRUTTURA_%{idCso}"
    index => "struttura"
    action => "update"
    user => "user"
    password => "password"
    ssl => true
    cacert => "/usr/share/logstash/config/ca.crt"
  }
  http {
    url => "http://localhost:9200/medico/_update_by_query"
    user => "elastic"
    password => "changeme"
    http_method => "post"
    format => "message"
    content_type => "application/json"
    message => '{
      "query": {
        "term": {
          "idCso": "%{idCso}"
        }
      },
      "script": {
        "source": "ctx._source.noteSede=params.noteSede",
        "lang": "painless",
        "params": {
          "noteSede": "%{noteSede}"
        }
      }
    }'
  }
}
The stdout output shows me the documents sent to the output, like this:
{
  "query" => {
    "term" => {
      "idCso" => "859119"
    }
  },
  "script" => {
    "source" => "ctx._source.noteSede=params.noteSede",
    "lang" => "painless",
    "params" => {
      "noteSede" => "{dataFine=null, dataInizio=2020-02-13, descrizione=?, tipo=DB, id=6390644, data=2020-02-13 12:26:58.409},{dataFine=null, dataInizio=2020-02-13, descrizione=?, tipo=DE, id=6390645, data=2020-02-13 12:26:58.41}"
    }
  }
}
How can I get the noteSede array field into the message for _update_by_query?
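This question is left open in the thread, but one direction that might be worth exploring (an assumption, not from the original post): the 400 likely comes from %{noteSede} being rendered with Ruby's default formatting ({dataFine=null, ...}), which is not valid JSON. A sketch of serializing the array to a JSON string with a ruby filter first, so the http output's message template can substitute it directly (the field name noteSedeJson is hypothetical and the snippet may need adjusting to your data):
filter {
  # Runs after the aggregate filter; JSON-encode the aggregated array
  ruby {
    code => "
      require 'json'
      event.set('noteSedeJson', event.get('noteSede').to_json) if event.get('noteSede')
    "
  }
}
The message template would then reference it without surrounding quotes, e.g. "noteSede": %{noteSedeJson}, so the substituted value stays valid JSON.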

Logstash Not Recognizing The Lat/Lon Fields in JSON Format

I have fields like A_Latitude, A_Longitude, B_Latitude and B_Longitude. I would like to make use of this data and create maps in Kibana. The problem is that the data is getting into Elasticsearch, but the geojson fields created in the Logstash filter are not getting recognized and data is not being fed into geo_point1 and geo_point2.
Hence, I first created a geo_point mapping in the Kibana dev tools as follows:
PUT cc-test
{
  "mappings": {
    "properties": {
      "geo_point1": {
        "type": "geo_point"
      },
      "geo_point2": {
        "type": "geo_point"
      }
    }
  }
}
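As a quick sanity check (not in the original post), the resulting mapping can be verified with the standard mapping API before indexing anything:
GET cc-test/_mapping
If geo_point1 and geo_point2 show up there with type geo_point, the mapping side is fine and the problem is on the Logstash side.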
I have configured my Logstash config file the following way:
input {
  jdbc {
    # Postgres jdbc connection string to our database, mydb
    jdbc_connection_string => "some string"
    # The user we wish to execute our statement as
    jdbc_user => "User"
    jdbc_password => "Password"
    # The path to our downloaded jdbc driver
    jdbc_driver_library => "/apps/ELK/logstash/driver/ngdbc-2.4.56.jar"
    jdbc_driver_class => "com.sap.db.jdbc.Driver"
    # our query
    #jdbc_validate_connection => true
    #schedule => "* * * * *"
    #record_last_run => true
    #last_run_metadata_path => "login.txt"
    statement => "SELECT
      inputdata.A_LATITUDE, inputdata.A_LONGITUDE, inputdata.B_LATITUDE,
      inputdata.B_LONGITUDE, outputdata.BANDWIDTH, inputdata.SEQUENCEID,
      inputdata.REQUESTTIMESTAMP
      FROM inputdata, outputdata
      WHERE
      inputdata.SEQUENCEID = outputdata.SEQUENCEID
      AND inputdata.REQUEST_TIMESTAMP >= '2019-01-01 00:00:00'
      AND inputdata.SEQUENCEID IS NOT NULL
      AND inputdata.SEQUENCEID NOT IN ('N/A')
      ORDER BY inputdata.SEQUENCEID DESC"
    #jdbc_paging_enabled => "true"
    #jdbc_page_size => "10000"
  }
}
filter {
  mutate {
    convert => { "A_LONGITUDE" => "float" }
    convert => { "A_LATITUDE" => "float" }
    convert => { "B_LONGITUDE" => "float" }
    convert => { "B_LATITUDE" => "float" }
  }
  mutate {
    rename => {
      "A_LONGITUDE" => "[geo_point1][lon]"
      "A_LATITUDE" => "[geo_point1][lat]"
    }
  }
  mutate {
    rename => {
      "B_LONGITUDE" => "[geo_point2][lon]"
      "B_LATITUDE" => "[geo_point2][lat]"
    }
  }
}
output {
  elasticsearch {
    hosts => ["http://some server"]
    index => "cc-test"
    #document_type => "system_logs"
    user => "Username"
    password => "Password"
  }
  stdout { codec => rubydebug }
}
I don't understand what is wrong with the filter part and why data is not getting into the fields geo_point1 and geo_point2. Can somebody please help?
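One thing worth checking here (an assumption, not something stated in the post): the jdbc input lowercases column names by default (lowercase_column_names defaults to true), so the events may carry a_latitude, a_longitude, etc. rather than the upper-case names the mutate filters reference, in which case the convert and rename steps silently do nothing. A sketch of the filter under that assumption:
filter {
  # Assumes the jdbc input's default lowercase_column_names => true,
  # so the columns arrive as lower-case field names.
  mutate {
    convert => {
      "a_longitude" => "float"
      "a_latitude"  => "float"
      "b_longitude" => "float"
      "b_latitude"  => "float"
    }
  }
  mutate {
    rename => {
      "a_longitude" => "[geo_point1][lon]"
      "a_latitude"  => "[geo_point1][lat]"
      "b_longitude" => "[geo_point2][lon]"
      "b_latitude"  => "[geo_point2][lat]"
    }
  }
}
Alternatively, setting lowercase_column_names => false on the jdbc input would keep the original column names.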

Can't create a field with a variable from a grok match regex

I am currently using Logstash, Elasticsearch and Kibana 6.3.0.
My logs are generated at a unique-id path: /tmp/USER_DATA/FactoryContainer/images/(my unique id)/oar/oar_image_job(my unique id).stdout
What I want to do is to match this unique id and create a field with it.
I'm a bit of a novice with Logstash filters, and I don't know why it doesn't want to use my uid: it either always returns %{uid} in my field or gives this 'Failed to execute action' error.
My filter:
input {
  file {
    path => "/tmp/USER_DATA/FactoryContainer/images/*/oar/oar_image_job*.stdout"
    start_position => "beginning"
    add_field => { "data_source" => "oar-image-job" }
  }
}
filter {
  grok {
    match => ["path","%{UNIXPATH}%{NUMBER:uid}%{UNIXPATH}"]
  }
  mutate {
    add_field => [ "unique_id" => "%{uid}" ]
  }
}
output {
  if [data_source] == "oar-image-job" {
    elasticsearch {
      index => "oar-image-job-%{+YYYY.MM.dd}"
      hosts => ["localhost:9200"]
    }
  }
}
The data_source field is there to avoid this issue: when you put multiple config files in a directory for Logstash to use, they all get concatenated.
In the grok debugger, %{UNIXPATH}%{NUMBER:uid}%{UNIXPATH} returns the right value for my path.
Link to the solution: https://discuss.elastic.co/t/cant-create-a-field-with-a-variable-from-a-grok-match-regex/142613/7?u=thesmartmonkey
The correct filter:
input {
  file {
    path => "/tmp/USER_DATA/FactoryContainer/images/*/oar/oar_image_job*.stdout"
    start_position => "beginning"
    add_field => { "data_source" => "oar-image-job" }
  }
}
filter {
  grok {
    match => { "path" => [ "/tmp/USER_DATA/FactoryContainer/images/%{DATA:unique_id}/oar/oar_image_job%{DATA}.stdout" ] }
  }
}
output {
  if [data_source] == "oar-image-job" {
    elasticsearch {
      index => "oar-image-job-%{+YYYY.MM.dd}"
      hosts => ["localhost:9200"]
    }
  }
}
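For reference (not part of the linked solution), the 'Failed to execute action' error in the original attempt most likely comes from the mutate syntax: add_field expects a hash, not the array-with-arrow form used above. If you wanted to keep the two-step grok-then-mutate approach, it would look something like this (only useful when the grok actually captured uid):
mutate {
  add_field => { "unique_id" => "%{uid}" }
}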

GROK custom pattern filter in logstash

How do you create a grok custom pattern filter in Logstash?
I want to create a pattern for HTTP response status codes. Here is my pattern code:
STATUS_CODE __ %{NONNEGINT} __
What I really want is to capture all of my web server hits with the user IP, the request HTTP headers and payload, and also the web server's response.
And here is my logstash.conf:
input {
  file {
    type => "kpi-success"
    path => "/var/log/kpi_success.log"
    start_position => beginning
  }
}
filter {
  if [type] == "kpi-success" {
    grok {
      patterns_dir => ["./patterns"]
      match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} %{GREEDYDATA:message} "}
    }
    multiline {
      pattern => "^\["
      what => "previous"
      negate => true
    }
    mutate {
      add_field => {
        "statusCode" => "[STATUS_CODE]"
      }
    }
  }
}
output {
  if [type] == "kpi-success" {
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "kpi-success-%{+YYYY.MM.dd}"
    }
  }
}
You don't have to use a custom pattern file; you can define a new pattern directly in the filter.
grok {
  match => { "message" => "(?<STATUS_CODE>__ %{NONNEGINT} __)" }
}
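For completeness, if you did want to go through a patterns file, a sketch might look like the following (the file name and the surrounding log layout are assumptions, since the full log format isn't shown). A file under ./patterns, say ./patterns/custom, would define the pattern with a named capture:
STATUS_CODE __ %{NONNEGINT:statusCode} __
The grok filter would then reference the pattern directly instead of adding the field via mutate:
grok {
  patterns_dir => ["./patterns"]
  match => { "message" => "%{TIMESTAMP_ISO8601:timestamp} .* %{STATUS_CODE}" }
}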

Issue in renaming Json parsed field in Logstash

I am parsing a JSON log file in Logstash. There is a field named #person.name. I tried to rename this field before sending it to Elasticsearch. I also tried to remove the field, but I couldn't remove or delete it, and because of that my data is not getting indexed in Elasticsearch.
Error recorded in Elasticsearch:
MapperParsingException[Field name [#person.name] cannot contain '.']
at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseProperties(ObjectMapper.java:276)
at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseObjectOrDocumentTypeProperties(ObjectMapper.java:221)
at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parse(ObjectMapper.java:196)
at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseProperties(ObjectMapper.java:308)
at org.elasticsearch.index.mapper.object.ObjectMapper$TypeParser.parseObjectOrDocumentTypeProperties(ObjectMapper.java:221)
at org.elasticsearch.index.mapper.object.RootObjectMapper$TypeParser.parse(RootObjectMapper.java:138)
at org.elasticsearch.index.mapper.DocumentMapperParser.parse(DocumentMapperParser.java:119)
at org.elasticsearch.index.mapper.DocumentMapperParser.parse(DocumentMapperParser.java:100)
at org.elasticsearch.index.mapper.MapperService.parse(MapperService.java:435)
at org.elasticsearch.cluster.metadata.MetaDataMappingService$PutMappingExecutor.applyRequest(MetaDataMappingService.java:257)
at org.elasticsearch.cluster.metadata.MetaDataMappingService$PutMappingExecutor.execute(MetaDataMappingService.java:230)
at org.elasticsearch.cluster.service.InternalClusterService.runTasksForExecutor(InternalClusterService.java:458)
at org.elasticsearch.cluster.service.InternalClusterService$UpdateTask.run(InternalClusterService.java:762)
My Logstash config:
input {
  beats {
    port => 11153
  }
}
filter {
  if [type] == "person_get" {
    ## Parsing JSON input with the JSON filter
    json {
      source => "message"
    }
    mutate {
      rename => { "#person.name" => "#person-name" }
      remove_field => [ "#person.name" ]
    }
    fingerprint {
      source => ["ResponseTimestamp"]
      target => "fingerprint"
      key => "78787878"
      method => "SHA1"
      concatenate_sources => true
    }
  }
}
output {
  if [type] == "person_get" {
    elasticsearch {
      index => "logstash-person_v1"
      hosts => ["xxx.xxx.xx:9200"]
      document_id => "%{fingerprint}" # !!! prevent duplication
    }
    stdout {
      codec => rubydebug
    }
  }
}
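This one is left without an answer in the excerpt, but a possible direction (an assumption, not from the original post) is the dedicated de_dot filter, which replaces dots in field names; it requires the logstash-filter-de_dot plugin (bin/logstash-plugin install logstash-filter-de_dot). A minimal sketch:
filter {
  # Replace the dot in "#person.name" with "-", yielding "#person-name".
  de_dot {
    fields    => ["#person.name"]
    separator => "-"
  }
}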
