Failed to install template {:message=>"Got response code '400'"} - logstash

I know this has been asked before, but I still cannot get it working on my deployment. I am using the latest versions (8.4) of both Elasticsearch and Logstash. I want to add an index template that is loaded from the Logstash configuration file. Here is the output section of my Logstash configuration:
output {
  elasticsearch {
    hosts => ["https://192.168.1.214:9200"]
    ssl => true
    cacert => "/etc/logstash/certs/http_ca.crt"
    user => ""
    password => ""
    index => "apache_elk_example"
    template => "/etc/logstash/conf.d/apache_template.json"
    template_name => "apache_elk_example"
    template_overwrite => true
  }
  stdout { codec => rubydebug }
}
Here is my apache_template.json
{
  "template": "apache_elk_example",
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "_default_": {
      "dynamic_templates": [
        {
          "message_field": {
            "mapping": {
              "index": "analyzed",
              "omit_norms": true,
              "type": "string"
            },
            "match_mapping_type": "string",
            "match": "message"
          }
        },
        {
          "string_fields": {
            "mapping": {
              "index": "analyzed",
              "omit_norms": true,
              "type": "string",
              "fields": {
                "raw": {
                  "index": "not_analyzed",
                  "ignore_above": 256,
                  "type": "string"
                }
              }
            },
            "match_mapping_type": "string",
            "match": "*"
          }
        }
      ],
      "properties": {
        "ip_address": {
          "type": "keyword"
        },
        "@version": {
          "index": "not_analyzed",
          "type": "string"
        }
      },
      "_all": {
        "enabled": true
      }
    }
  }
}
I will eventually add more to this, but I am struggling to get any template to load at all. Thanks for reading.
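For reference, several constructs in this template were removed in later major versions, which is the usual cause of a 400 on install against 8.x: the template key was replaced by index_patterns, the _default_ mapping type and the _all field are gone, and string with analyzed/not_analyzed gave way to text/keyword (with norms instead of omit_norms). A minimal sketch of the same template rewritten for 8.x, assuming Logstash installs it through the legacy _template API (which 8.x still accepts):
{
  "index_patterns": ["apache_elk_example"],
  "settings": {
    "index.refresh_interval": "5s"
  },
  "mappings": {
    "dynamic_templates": [
      {
        "message_field": {
          "match_mapping_type": "string",
          "match": "message",
          "mapping": { "type": "text", "norms": false }
        }
      },
      {
        "string_fields": {
          "match_mapping_type": "string",
          "match": "*",
          "mapping": {
            "type": "text",
            "norms": false,
            "fields": {
              "raw": { "type": "keyword", "ignore_above": 256 }
            }
          }
        }
      }
    ],
    "properties": {
      "ip_address": { "type": "keyword" },
      "@version": { "type": "keyword" }
    }
  }
}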

Related

Filter data using Node.js and Elasticsearch

I'm currently facing an issue with my datatable implemented in ReactJS. I retrieve data from Elasticsearch and populate the datatable with it. Retrieval works fine without filters, but when I apply filters the datatable stays empty, even though the data _source has matching records.
The structure of the parameters I am sending is as follows:
{
  pageIndex: 1,
  pageSize: 10,
  sort: { order: '', key: '' },
  query: '',
  filterData: {
    analysis: [ '0', '1', '2', '3' ],
    threat_level_id: [ '1', '2', '3', '4' ],
  }
}
Endpoint:
POST /api/v1/events/public/list
Controller:
exports.getPublicEvents = async (req, res) => {
  try {
    client.ping()
    const { pageIndex, pageSize, sort, query, filterData } = req.body
    let esQuery = {
      index: 'ns_*',
      body: {
        query: {
          bool: {
            must: [
              {
                match_all: {},
              },
            ],
            filter: [],
          },
        },
        from: (pageIndex - 1) * pageSize,
        size: pageSize,
      },
    }
    if (query) {
      esQuery.body.query.bool.must = [
        {
          match: {
            'Event.info': {
              query: query,
              fuzziness: 'AUTO',
            },
          },
        },
      ]
    }
    if (filterData.analysis.length > 0) {
      esQuery.body.query.bool.filter.push({
        terms: {
          'Event.analysis': filterData.analysis,
        },
      })
    }
    if (filterData.threat_level_id.length > 0) {
      esQuery.body.query.bool.filter.push({
        terms: {
          'Event.threat_level_id': filterData.threat_level_id,
        },
      })
    }
    let esResponse = await client.search(esQuery)
    let data = esResponse.hits.hits.map((hit) => hit._source)
    let total = esResponse.hits.total.value
    res.status(200).json({
      status: 'success',
      data: data,
      total: total,
    })
  } catch (error) {
    res.status(500).json({
      error: 'Error connecting to Elasticsearch',
      errorMessage: error.message,
    })
  }
}
The controller below is without filters and it works just fine.
exports.getPublicEvents = async (req, res) => {
  try {
    client.ping()
    const { pageIndex, pageSize, sort, query } = req.body
    let esQuery = {
      index: 'ns_*',
      body: {
        query: {
          match_all: {},
        },
        from: (pageIndex - 1) * pageSize,
        size: pageSize,
      },
    }
    if (query) {
      esQuery.body.query = {
        match: {
          'Event.info': {
            query: query,
            fuzziness: 'AUTO',
          },
        },
      }
    }
    let esResponse = await client.search(esQuery)
    let data = esResponse.hits.hits.map((hit) => hit._source)
    let total = esResponse.hits.total.value
    res.status(200).json({
      status: 'success',
      data: data,
      total: total,
    })
  } catch (error) {
    res.status(500).json({
      error: 'Error connecting to Elasticsearch',
      errorMessage: error.message,
    })
  }
}
Elasticsearch version: 7.17.8
Result of console.log(JSON.stringify(esQuery)):
{
  "index": "INDEX_NAME",
  "body": {
    "query": {
      "bool": {
        "must": [{ "match_all": {} }],
        "filter": [
          { "terms": { "Event.analysis": ["0", "1", "2"] } },
          { "terms": { "Event.threat_level_id": ["1", "2", "3", "4"] } }
        ]
      }
    },
    "from": 0,
    "size": 10
  }
}
Data schema in Elasticsearch:
{
  "@version": "1",
  "@timestamp": "2023-02-01T14:43:09.997Z",
  "Event": {
    "info": ".......................",
    "description": ".......................",
    "analysis": 0,
    "threat_level_id": "4",
    "created_at": 1516566351,
    "uuid": "5a64f74f0e543738c12bc973322",
    "updated_at": 1675262417
  }
}
Index Mapping
{
  "index_patterns": ["INDEX_NAME"],
  "template": "TEMPLATE_NAME",
  "settings": {
    "number_of_replicas": 0,
    "index.mapping.nested_objects.limit": 10000000
  },
  "mappings": {
    "dynamic": false,
    "properties": {
      "@timestamp": {
        "type": "date"
      },
      "Event": {
        "type": "nested",
        "properties": {
          "date_occured": {
            "type": "date"
          },
          "threat_level_id": {
            "type": "integer"
          },
          "description": {
            "type": "text"
          },
          "is_shared": {
            "type": "boolean"
          },
          "analysis": {
            "type": "integer"
          },
          "uuid": {
            "type": "text"
          },
          "created_at": {
            "type": "date"
          },
          "info": {
            "type": "text"
          },
          "shared_with": {
            "type": "nested",
            "properties": {
              "_id": {
                "type": "text"
              }
            }
          },
          "updated_at": {
            "type": "date"
          },
          "author": {
            "type": "text"
          },
          "Attributes": {
            "type": "nested",
            "properties": {
              "data": {
                "type": "text"
              },
              "type": {
                "type": "text"
              },
              "uuid": {
                "type": "text"
              },
              "comment": {
                "type": "text"
              },
              "category": {
                "type": "text"
              },
              "value": {
                "type": "text"
              },
              "timestamp": {
                "type": "date"
              }
            }
          },
          "organisation": {
            "type": "nested",
            "properties": {
              "name": {
                "type": "text"
              },
              "uuid": {
                "type": "text"
              }
            }
          },
          "Tags": {
            "type": "nested",
            "properties": {
              "color": {
                "type": "text"
              },
              "name": {
                "type": "text"
              }
            }
          },
          "TLP": {
            "type": "nested",
            "properties": {
              "color": {
                "type": "text"
              },
              "name": {
                "type": "text"
              }
            }
          }
        }
      }
    }
  }
}
Event is a nested field, so you need to use nested queries, like this:
{
  "index": "INDEX_NAME",
  "body": {
    "query": {
      "bool": {
        "must": [{ "match_all": {} }],
        "filter": [
          {
            "nested": {
              "path": "Event",
              "query": { "terms": { "Event.analysis": ["0", "1", "2"] } }
            }
          },
          {
            "nested": {
              "path": "Event",
              "query": { "terms": { "Event.threat_level_id": ["1", "2", "3", "4"] } }
            }
          }
        ]
      }
    },
    "from": 0,
    "size": 10
  }
}
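Applied to the controller above, the fix is to wrap each terms clause pushed into bool.filter in a nested query. A minimal sketch using the same variable names as the original controller:
// Each terms filter is wrapped in a nested query so it can match
// fields inside the nested Event objects.
if (filterData.analysis.length > 0) {
  esQuery.body.query.bool.filter.push({
    nested: {
      path: 'Event',
      query: { terms: { 'Event.analysis': filterData.analysis } },
    },
  })
}
if (filterData.threat_level_id.length > 0) {
  esQuery.body.query.bool.filter.push({
    nested: {
      path: 'Event',
      query: { terms: { 'Event.threat_level_id': filterData.threat_level_id } },
    },
  })
}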

How to define an array in gRPC using JSON descriptors?

I'm using JSON descriptors instead of the proto format. Everything works except the array of Todo: I need Todos to carry an array of Todo messages.
How do I define that? I set "type": "array", but it always returns the error:
Error: no such Type or Enum 'array' in Type .Todos
My JSON file looks like this:
const todo = {
  "nested": {
    "Services": {
      "methods": {
        "createTodo": {
          "requestType": "Todo",
          "requestStream": false,
          "responseType": "Todo",
          "responseStream": false
        },
        "readTodos": {
          "requestType": "voidNoParam",
          "requestStream": false,
          "responseType": "Todos",
          "responseStream": false
        },
        "readTodosStream": {
          "requestType": "voidNoParam",
          "requestStream": false,
          "responseType": "Todo",
          "responseStream": true
        }
      }
    },
    "Todo": {
      "fields": {
        "id": {
          "type": "int32",
          "id": 1
        },
        "text": {
          "type": "string",
          "id": 2
        }
      }
    },
    "Todos": {
      "fields": {
        "items": {
          "type": "array",
          "id": 1
        }
      }
    },
    "voidNoParam": {
      "fields": {}
    }
  }
}

module.exports = todo
I found the problem, and it's really simple: protobuf has no array type; a repeated field is declared with the rule property instead.
"Todos": {
"fields": {
"items": {
"rule": "repeated",
"type": "Todo",
"id": 1
}
}
},
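For a quick sanity check, the fixed descriptor can be loaded and exercised with protobufjs, which understands this JSON format via Root.fromJSON (the require path and sample payload below are made up for illustration):
const protobuf = require('protobufjs')
const todo = require('./todo') // hypothetical path to the descriptor above

const root = protobuf.Root.fromJSON(todo)
const Todos = root.lookupType('Todos')

// A "repeated" field is a plain JavaScript array on the message object.
const payload = { items: [{ id: 1, text: 'write docs' }] }
const err = Todos.verify(payload)
if (err) throw Error(err)

const buffer = Todos.encode(Todos.create(payload)).finish()
console.log(Todos.decode(buffer).items.length) // 1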

No results returned from the NEST C# Elasticsearch query

I am indexing an attachment field. The POST query in Sense returns the expected result set.
My query is
POST /mydocs/_search
{
  "query": {
    "bool": {
      "must": [
        { "match": { "file.content": "abc" } },
        { "match": { "otherDetails": "asd" } },
        { "match": { "filePermissionInfo.accountValue": "xyz" } }
      ]
    }
  }
}
I need to convert it to C# NEST code. I tried converting it, but it doesn't return any results, even though the index contains matching data. If I remove
m.Match(mt1 => mt1.Field(f1 => f1.File.Coontent).Query(queryTerm))
from the expression below, it returns a result set. Is there a problem with the attachment field?
client.Search<IndexDocument>(s => s
    .Index("mydocs")
    .Query(q => q
        .Bool(b => b
            .Must(m =>
                m.Match(mt1 => mt1.Field(f1 => f1.File.Coontent).Query(queryTerm)) &&
                m.Match(mt2 => mt2.Field(f2 => f2.FilePermissionInfo.First().SecurityIdValue).Query(accountName)) &&
                m.Match(mt3 => mt3.Field(f3 => f3.OtherDetails).Query(other))
            ))));
My mapping is
{
  "mydocs": {
    "mappings": {
      "indexdocument": {
        "properties": {
          "docLocation": {
            "type": "string",
            "index": "not_analyzed",
            "store": true
          },
          "documentType": {
            "type": "string",
            "store": true
          },
          "file": {
            "type": "attachment",
            "fields": {
              "content": {
                "type": "string",
                "term_vector": "with_positions_offsets",
                "analyzer": "full"
              },
              "author": {
                "type": "string"
              },
              "title": {
                "type": "string",
                "term_vector": "with_positions_offsets",
                "analyzer": "full"
              },
              "name": {
                "type": "string"
              },
              "date": {
                "type": "date",
                "format": "strict_date_optional_time||epoch_millis"
              },
              "keywords": {
                "type": "string"
              },
              "content_type": {
                "type": "string"
              },
              "content_length": {
                "type": "integer"
              },
              "language": {
                "type": "string"
              }
            }
          },
          "filePermissionInfo": {
            "properties": {
              "fileSystemRights": {
                "type": "string",
                "store": true
              },
              "securityIdValue": {
                "type": "string",
                "store": true
              }
            }
          },
          "id": {
            "type": "double",
            "store": true
          },
          "lastModifiedDate": {
            "type": "date",
            "store": true,
            "format": "strict_date_optional_time||epoch_millis"
          },
          "otherDetails": {
            "type": "string"
          },
          "title": {
            "type": "string",
            "store": true,
            "term_vector": "with_positions_offsets"
          }
        }
      }
    }
  }
}
It looks like the query hasn't been translated to NEST correctly. In the query you have
"filePermissionInfo.accountValue"
but in the NEST query you only have
f2 => f2.FilePermissionInfo
which resolves only to filePermissionInfo. You need to change it to
f2 => f2.FilePermissionInfo.AccountValue
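Applied to the search expression above, only the second Match clause changes (AccountValue is an assumption here: the property must exist on the IndexDocument POCO and serialize to filePermissionInfo.accountValue):
// Assumed property name; it must serialize to "filePermissionInfo.accountValue".
m.Match(mt2 => mt2.Field(f2 => f2.FilePermissionInfo.First().AccountValue).Query(accountName))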

How to apply a filter on geo coordinates in Elasticsearch?

I am using Elasticsearch with the mongoosastic npm module. I am trying to apply a filter on geo coordinates, using the following model structure:
geoLocation: {
  type: {
    type: String,
    default: 'Point'
  },
  coordinates: [Number] // order should be lat,lng
}
with the mapping as follows
{
  "events": {
    "settings": {
      "analysis": {
        "filter": {
          "edgeNGram_filter": {
            "type": "edgeNGram",
            "min_gram": 1,
            "max_gram": 50,
            "side": "front"
          }
        },
        "analyzer": {
          "edge_nGram_analyzer": {
            "type": "custom",
            "tokenizer": "edge_ngram_tokenizer",
            "filter": [
              "lowercase",
              "asciifolding",
              "edgeNGram_filter"
            ]
          },
          "whitespace_analyzer": {
            "type": "custom",
            "tokenizer": "whitespace",
            "filter": [
              "lowercase",
              "asciifolding"
            ]
          }
        },
        "tokenizer": {
          "edge_ngram_tokenizer": {
            "type": "edgeNGram",
            "min_gram": "1",
            "max_gram": "50",
            "token_chars": [
              "letter",
              "digit"
            ]
          }
        }
      }
    },
    "mappings": {
      "event": {
        "_all": {
          "index_analyzer": "nGram_analyzer",
          "search_analyzer": "whitespace_analyzer"
        },
        "properties": {
          "title": {
            "type": "string",
            "index": "not_analyzed"
          },
          "geoLocation": {
            "index": "not_analyzed",
            "type": "geo_point"
          }
        }
      }
    }
  }
}
Query
{
  "query": {
    "multi_match": {
      "query": "the",
      "fields": ["title"]
    }
  },
  "filter": {
    "geo_distance": {
      "distance": "200km",
      "geoLocation.coordinates": {
        "lat": 19.007452,
        "lon": 72.831556
      }
    }
  }
}
I am unable to index the geo coordinates with the above model structure. I don't understand whether it is even possible with this structure, because in my case the coordinates are ordered lat,lng, and I have read that Elasticsearch expects coordinates in lng,lat order.
Error
Error: SearchPhaseExecutionException[Failed to execute phase [query],
all shards failed; shardFailures {[CDHdgtJnTbeu8tl2mDfllg][events][0]:
SearchParseException[[events][0]: from[-1],size[-1]: Parse Failure
[Failed to parse source
curl -XGET localhost:9200/events
{
  "events": {
    "aliases": {},
    "mappings": {
      "1": {
        "properties": {
          "location": {
            "type": "double"
          },
          "text": {
            "type": "string"
          }
        }
      },
      "event": {
        "properties": {
          "city": {
            "type": "string"
          },
          "endTime": {
            "type": "date",
            "format": "dateOptionalTime"
          },
          "geo_with_lat_lon": {
            "type": "geo_point",
            "lat_lon": true
          },
          "isActive": {
            "type": "boolean"
          },
          "isRecommended": {
            "type": "boolean"
          },
          "location": {
            "type": "string"
          },
          "title": {
            "type": "string"
          }
        }
      }
    },
    "settings": {
      "index": {
        "creation_date": "1461675012489",
        "uuid": "FT-xVUdPQtyuKFm4J4Rd7g",
        "number_of_replicas": "1",
        "number_of_shards": "5",
        "events": {
          "mappings": {
            "event": {
              "_all": {
                "enabled": "false",
                "search_analyzer": "whitespace_analyzer",
                "index_analyzer": "nGram_analyzer"
              },
              "properties": {
                "geoLocation": {
                  "coordinates": {
                    "type": "geo_shape",
                    "index": "not_analyzed"
                  }
                },
                "location": {
                  "type": "string",
                  "index": "not_analyzed"
                },
                "title": {
                  "type": "string",
                  "index": "not_analyzed"
                },
                "geo_with_lat_lon": {
                  "type": "geo_point",
                  "lat_lon": "true",
                  "index": "not_analyzed"
                }
              }
            }
          },
          "settings": {
            "analysis": {
              "analyzer": {
                "edge_nGram_analyzer": {
                  "type": "custom",
                  "filter": [
                    "lowercase",
                    "asciifolding",
                    "edgeNGram_filter"
                  ],
                  "tokenizer": "edge_ngram_tokenizer"
                },
                "whitespace_analyzer": {
                  "type": "custom",
                  "filter": [
                    "lowercase",
                    "asciifolding"
                  ],
                  "tokenizer": "whitespace"
                }
              },
              "filter": {
                "edgeNGram_filter": {
                  "max_gram": "50",
                  "type": "edgeNGram",
                  "min_gram": "1",
                  "side": "front"
                }
              },
              "tokenizer": {
                "edge_ngram_tokenizer": {
                  "max_gram": "50",
                  "type": "edgeNGram",
                  "min_gram": "1",
                  "token_chars": [
                    "letter",
                    "digit"
                  ]
                }
              }
            }
          }
        },
        "version": {
          "created": "1070099"
        }
      }
    },
    "warmers": {}
  }
}
I found a solution to my question.
Mapping
PUT /geo_test
{
  "mappings": {
    "type_test": {
      "properties": {
        "name": {
          "type": "string"
        },
        "geoLocation": {
          "type": "nested",
          "properties": {
            "coordinates": {
              "type": "geo_point",
              "lat_lon": true
            }
          }
        }
      }
    }
  }
}
Query
POST /geo_test/type_test/_search
{
  "query": {
    "filtered": {
      "filter": {
        "nested": {
          "path": "geoLocation",
          "query": {
            "filtered": {
              "filter": {
                "geo_distance": {
                  "distance": 5,
                  "distance_unit": "km",
                  "geoLocation.coordinates": {
                    "lat": 41.12,
                    "lon": -71.34
                  }
                }
              }
            }
          }
        }
      }
    }
  }
}
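On the lat,lng ordering concern from the question: Elasticsearch accepts a geo_point in several notations, and only the array form uses GeoJSON lon,lat order, so sticking to the object form avoids the ambiguity entirely. For illustration, three equivalent ways to write the same point:
"coordinates": { "lat": 19.007452, "lon": 72.831556 }  // object form: explicit keys
"coordinates": "19.007452,72.831556"                   // string form: "lat,lon"
"coordinates": [72.831556, 19.007452]                  // array form: [lon, lat] (GeoJSON order)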

Elasticsearch query stops working with a big amount of data

The problem: I have two indexes that are identical in terms of settings and mappings.
The first index contains only one document.
The second index contains the same document plus 16M others.
When I run the query on the first index it returns the document, but the same query on the second index returns nothing.
Index settings:
{
  "tasks_test": {
    "settings": {
      "index": {
        "analysis": {
          "analyzer": {
            "tag_analyzer": {
              "filter": [
                "lowercase",
                "tag_filter"
              ],
              "tokenizer": "whitespace",
              "type": "custom"
            }
          },
          "filter": {
            "tag_filter": {
              "type": "word_delimiter",
              "type_table": "# => ALPHA"
            }
          }
        },
        "creation_date": "1444127141035",
        "number_of_replicas": "2",
        "number_of_shards": "5",
        "uuid": "wTe6WVtLRTq0XwmaLb7BLg",
        "version": {
          "created": "1050199"
        }
      }
    }
  }
}
Mappings:
{
  "tasks_test": {
    "mappings": {
      "Task": {
        "dynamic": "false",
        "properties": {
          "…": {
            "format": "dateOptionalTime",
            "include_in_all": false,
            "type": "date"
          },
          "is_private": {
            "type": "boolean"
          },
          "last_timestamp": {
            "type": "integer"
          },
          "name": {
            "analyzer": "tag_analyzer",
            "type": "string"
          },
          "project_id": {
            "include_in_all": false,
            "type": "integer"
          },
          "user_id": {
            "include_in_all": false,
            "type": "integer"
          }
        }
      }
    }
  }
}
The document:
{
  "_index": "tasks_test",
  "_type": "Task",
  "_id": "1",
  "_source": {
    "is_private": false,
    "name": "135548- test with number",
    "project_id": 2,
    "user_id": 1
  }
}
The query:
{
  "query": {
    "filtered": {
      "query": {
        "bool": {
          "must": [
            [
              {
                "match": {
                  "_all": {
                    "query": "135548",
                    "type": "phrase_prefix"
                  }
                }
              }
            ]
          ]
        }
      },
      "filter": {
        "bool": {
          "must": [
            {
              "term": {
                "is_private": false
              }
            },
            {
              "terms": {
                "project_id": [
                  2
                ]
              }
            },
            {
              "terms": {
                "user_id": [
                  1
                ]
              }
            }
          ]
        }
      }
    }
  }
}
Also, some findings:
if I replace _all with name, everything works
if I replace the phrase_prefix match with match_phrase, it works too
ES version: 1.5.1
So, the question is: how can I make the query work on the second index without the hacks mentioned above?
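One plausible cause (an assumption, not verified against this data set): a phrase_prefix match expands the trailing term into at most max_expansions concrete terms, 50 by default. With 16M documents there are far more terms starting with "135548" in _all than in name, so the term the document actually contains may never make it into the expansion list. If that is the cause, raising the limit should make the original query work on the big index:
{
  "match": {
    "_all": {
      "query": "135548",
      "type": "phrase_prefix",
      "max_expansions": 1000
    }
  }
}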
