Using JSON descriptors, how do I define an array in gRPC? - node.js

I'm using JSON descriptors instead of the .proto format. Everything works except the array of Todo: I need an array of Todos.
How do I define that? I put "type": "array", but it always returns the error:
Error: no such Type or Enum 'array' in Type .Todos
My JSON file looks like this:
const todo = {
"nested": {
"Services": {
"methods": {
"createTodo": {
"requestType": "Todo",
"requestStream": false,
"responseType": "Todo",
"responseStream": false
},
"readTodos": {
"requestType": "voidNoParam",
"requestStream": false,
"responseType": "Todos",
"responseStream": false
},
"readTodosStream": {
"requestType": "voidNoParam",
"requestStream": false,
"responseType": "Todo",
"responseStream": true
}
}
},
"Todo": {
"fields": {
"id": {
"type": "int32",
"id": 1
},
"text": {
"type": "string",
"id": 2
}
}
},
"Todos": {
"fields": {
"items": {
"type": "array",
"id": 1
}
}
},
"voidNoParam": {
"fields": {}
}
}
}
module.exports = todo

I found the problem, and it's really simple: the items field needs the "repeated" rule rather than an array type.
"Todos": {
"fields": {
"items": {
"rule": "repeated",
"type": "Todo",
"id": 1
}
}
},
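To sanity-check the fixed descriptor, here is a minimal sketch (assuming the protobufjs package, which this JSON descriptor format comes from, and that todo is the module exported above):
const protobuf = require("protobufjs");
const todo = require("./todo"); // the JSON descriptor above

// Build a reflection root from the JSON descriptor and resolve Todos.
const root = protobuf.Root.fromJSON(todo);
const Todos = root.lookupType("Todos");

// With "rule": "repeated", items accepts a plain JavaScript array.
const err = Todos.verify({ items: [{ id: 1, text: "first todo" }] });
console.log(err); // null means the payload matches the schema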

Related

Filter data using Node.js and Elasticsearch

I'm currently facing an issue with my datatable implemented in ReactJS. I'm retrieving data from Elasticsearch and populating the datatable with it. Retrieval works fine without filters; however, when I apply filters, the datatable remains empty, even though the data _source has matching records.
The structure of the parameters I am sending is as follows:
{
pageIndex: 1,
pageSize: 10,
sort: { order: '', key: '' },
query: '',
filterData: {
analysis: [ '0', '1', '2', '3' ],
threat_level_id: [ '1', '2', '3', '4' ],
}
}
EndPoint:
POST /api/v1/events/public/list
Controller:
exports.getPublicEvents = async (req, res) => {
try {
client.ping()
const { pageIndex, pageSize, sort, query, filterData } = req.body
let esQuery = {
index: 'ns_*',
body: {
query: {
bool: {
must: [
{
match_all: {},
},
],
filter: [],
},
},
from: (pageIndex - 1) * pageSize,
size: pageSize,
},
}
if (query) {
esQuery.body.query.bool.must = [
{
match: {
'Event.info': {
query: query,
fuzziness: 'AUTO',
},
},
},
]
}
if (filterData.analysis.length > 0) {
esQuery.body.query.bool.filter.push({
terms: {
'Event.analysis': filterData.analysis,
},
})
}
if (filterData.threat_level_id.length > 0) {
esQuery.body.query.bool.filter.push({
terms: {
'Event.threat_level_id': filterData.threat_level_id,
},
})
}
let esResponse = await client.search(esQuery)
let data = esResponse.hits.hits.map((hit) => hit._source)
let total = esResponse.hits.total.value
res.status(200).json({
status: 'success',
data: data,
total: total,
})
} catch (error) {
res.status(500).json({
error: 'Error connecting to Elasticsearch',
errorMessage: error.message,
})
}
}
The controller below, without filters, works just fine:
exports.getPublicEvents = async (req, res) => {
try {
client.ping()
const { pageIndex, pageSize, sort, query } = req.body
let esQuery = {
index: 'ns_*',
body: {
query: {
match_all: {},
},
from: (pageIndex - 1) * pageSize,
size: pageSize,
},
}
if (query) {
esQuery.body.query = {
match: {
'Event.info': {
query: query,
fuzziness: 'AUTO',
},
},
}
}
let esResponse = await client.search(esQuery)
let data = esResponse.hits.hits.map((hit) => hit._source)
let total = esResponse.hits.total.value
res.status(200).json({
status: 'success',
data: data,
total: total,
})
} catch (error) {
res.status(500).json({
error: 'Error connecting to Elasticsearch',
errorMessage: error.message,
})
}
}
Elasticsearch version: 7.17.8
Result of: console.log(JSON.stringify(esQuery))
{
"index": "INDEX_NAME",
"body": {
"query": {
"bool": {
"must": [{ "match_all": {} }],
"filter": [
{ "terms": { "Event.analysis": ["0", "1", "2"] } },
{ "terms": { "Event.threat_level_id": ["1", "2", "3", "4"] } }
]
}
},
"from": 0,
"size": 10
}
}
Sample document in Elasticsearch:
{
"#version": "1",
"#timestamp": "2023-02-01T14:43:09.997Z",
"Event": {
"info": ".......................",
"description": ".......................",
"analysis": 0,
"threat_level_id": "4",
"created_at": 1516566351,
"uuid": "5a64f74f0e543738c12bc973322",
"updated_at": 1675262417
}
}
Index Mapping
{
"index_patterns": ["INDEX_NAME"],
"template": "TEMPLATE_NAME",
"settings": {
"number_of_replicas": 0,
"index.mapping.nested_objects.limit": 10000000
},
"mappings": {
"dynamic": false,
"properties": {
"#timestamp": {
"type": "date"
},
"Event": {
"type": "nested",
"properties": {
"date_occured": {
"type": "date"
},
"threat_level_id": {
"type": "integer"
},
"description": {
"type": "text"
},
"is_shared": {
"type": "boolean"
},
"analysis": {
"type": "integer"
},
"uuid": {
"type": "text"
},
"created_at": {
"type": "date"
},
"info": {
"type": "text"
},
"shared_with": {
"type": "nested",
"properties": {
"_id": {
"type": "text"
}
}
},
"updated_at": {
"type": "date"
},
"author": {
"type": "text"
},
"Attributes": {
"type": "nested",
"properties": {
"data": {
"type": "text"
},
"type": {
"type": "text"
},
"uuid": {
"type": "text"
},
"comment": {
"type": "text"
},
"category": {
"type": "text"
},
"value": {
"type": "text"
},
"timestamp": {
"type": "date"
}
}
},
"organisation": {
"type": "nested",
"properties": {
"name": {
"type": "text"
},
"uuid": {
"type": "text"
}
}
},
"Tags": {
"type": "nested",
"properties": {
"color": {
"type": "text"
},
"name": {
"type": "text"
}
}
},
"TLP": {
"type": "nested",
"properties": {
"color": {
"type": "text"
},
"name": {
"type": "text"
}
}
}
}
}
}
}
}
Event is a nested field, so you need to use nested queries, like this:
{
"index": "INDEX_NAME",
"body": {
"query": {
"bool": {
"must": [{ "match_all": {} }],
"filter": [
{
"nested": {
"path": "Event",
"query": {"terms": { "Event.analysis": ["0", "1", "2"] }}
}
},
{
"nested": {
"path": "Event",
"query": {"terms": { "Event.threat_level_id": ["1", "2", "3", "4"] }}
}
}
]
}
},
"from": 0,
"size": 10
}
}
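Applied to the controller above, only the two filter pushes change; a sketch using the same field names:
if (filterData.analysis.length > 0) {
  esQuery.body.query.bool.filter.push({
    nested: {
      path: 'Event',
      query: { terms: { 'Event.analysis': filterData.analysis } },
    },
  })
}
if (filterData.threat_level_id.length > 0) {
  esQuery.body.query.bool.filter.push({
    nested: {
      path: 'Event',
      query: { terms: { 'Event.threat_level_id': filterData.threat_level_id } },
    },
  })
}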

Adding an index to mappings in MongoDB

Hey, I have a problem with my MongoDB search. I need to add one more field to my search index for better results. But since it's not in the mappings, whenever I perform my search as follows, it returns nothing:
embeddedDocument: {
  path: 'produces',
  operator: {
    compound: {
      must: [
        {
          autocomplete: {
            path: 'produces.name',
            query: val,
            fuzzy: {
              maxEdits: 2,
              prefixLength: 3
            }
          }
        },
        {
          equals: { path: 'produces.deleted', value: false }
        }
      ]
    }
  }
}
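For context, that embeddedDocument operator has to live inside a $search stage of an aggregation pipeline; a minimal sketch, with hypothetical collection and index names:
db.companies.aggregate([
  {
    $search: {
      index: 'default', // hypothetical Atlas Search index name
      embeddedDocument: {
        path: 'produces',
        operator: {
          compound: {
            must: [
              {
                autocomplete: {
                  path: 'produces.name',
                  query: 'apple', // stands in for `val` above
                  fuzzy: { maxEdits: 2, prefixLength: 3 }
                }
              },
              { equals: { path: 'produces.deleted', value: false } }
            ]
          }
        }
      }
    }
  }
])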
and here is my index definition
{
"mappings": {
"dynamic": false,
"fields": {
"companyName": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
],
"produces": {
"type": "embeddedDocuments",
"fields": {
"name": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
]
}
},
"status": {
"type": "string"
}
}
}
}
So I need to update my index definition as follows:
{
"mappings": {
"dynamic": false,
"fields": {
"companyName": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
],
"produces": {
"type": "embeddedDocuments",
"fields": {
"deleted": {
"type": "boolean"
},
"name": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
]
}
},
"status": {
"type": "string"
}
}
}
}
But my problem is that I don't know where to apply this updated index definition on the MongoDB side so I can filter with the deleted: false criteria.
I tried adding a regular index to the produces collection, but it doesn't work and I still get an empty result.
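Note that Atlas Search index definitions are not regular collection indexes: they are edited in the Atlas UI (the collection's Search tab, Edit Index Definition) or, on recent Atlas/MongoDB versions, from mongosh. A sketch, assuming a companies collection and an index named 'default' (both names hypothetical); updateSearchIndex replaces the whole definition:
db.companies.updateSearchIndex('default', {
  mappings: {
    dynamic: false,
    fields: {
      // pass the full updated definition here; truncated to the part
      // that changed:
      produces: {
        type: 'embeddedDocuments',
        fields: {
          deleted: { type: 'boolean' }
        }
      }
    }
  }
})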

Elasticsearch Node.js putIndexTemplate API

I'm using Elasticsearch 7.13.3 and I want to call the put index template API from my TypeScript app. I'm using the package "@elastic/elasticsearch": "7.13.0", but I get an error about the composition of the call.
From Kibana I can execute both requests without any error:
PUT _component_template/template-xxx2
{
"template": {
"mappings": {
"properties": {
"#timestamp": {
"type": "date"
},
"id": {
"type": "keyword"
},
"value": {
"type": "double",
"coerce": false
}
}
}
}
}
PUT _index_template/index-template-xxx2
{
"index_patterns": ["template-xxx2*"],
"template": {
"settings": {
"number_of_shards": 2
},
"mappings": {
"_source": {
"enabled": true
},
"properties": {
"created_at": {
"type": "date",
"format": "EEE MMM dd HH:mm:ss Z yyyy"
}
}
},
"aliases": {
"mydata": { }
}
},
"priority": 600,
"composed_of": ["template-xxx2"],
"version": 3,
"_meta": {
"description": "template-xxx2 description"
}
}
and I want to do the same from my Node app.
The template creation works fine:
void this.db.clientDb.indices.putTemplate({
name: `template_${this.index}`,
body: {
mappings: {
properties: {
'@timestamp': {
type: 'date'
},
id: {
type: 'keyword'
},
value: {
type: 'double',
coerce: false
}
}
}
}
});
But I can't find the correct overload for the this.db.clientDb.indices.putIndexTemplate({ ... }) API.
This gives me errors (no overloads match this call):
void this.db.clientDb.indices.putIndexTemplate({
name: '',
index_patterns: ["template-xxx2*"], // --> where should I put this property?
body: {
settings: {
number_of_shards: 2
},
mappings: {
_source: {
enabled: true
}
},
aliases: {
mydata: {}
}
},
priority: 500,
composed_of: ['template-xxx2'], // --> where should I put this property?
version: 3,
_meta: {
description: 'template-xxx2 description'
}
});
I want to reproduce the latter Kibana script.
Index templates have been overhauled in 7.8. The previous legacy endpoint was called _template and the new one is called _index_template.
You're mixing calls to the old and the new endpoint, i.e. putTemplate calls the old legacy endpoint and putIndexTemplate calls the new one.
Moreover, the whole template definition needs to go inside body, not at the top level of the call parameters.
So here is what you need to do. First, make this call to store the component template:
void this.db.clientDb.cluster.putComponentTemplate({
"name": "template-xxx2",
"body": {
"template": {
"mappings": {
"properties": {
"#timestamp": {
"type": "date"
},
"id": {
"type": "keyword"
},
"value": {
"type": "double",
"coerce": false
}
}
}
}
}
})
Then store the index template with the following call:
void this.db.clientDb.indices.putIndexTemplate({
"name": "index-template-xxx2",
"body": {
"index_patterns": ["template-xxx2*"],
"template": {
"settings": {
"number_of_shards": 2
},
"mappings": {
"_source": {
"enabled": true
},
"properties": {
"created_at": {
"type": "date",
"format": "EEE MMM dd HH:mm:ss Z yyyy"
}
}
},
"aliases": {
"mydata": { }
}
},
"priority": 600,
"composed_of": ["template-xxx2"],
"version": 3,
"_meta": {
"description": "template-xxx2 description"
}
}
})
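As a quick sanity check, the stored template can be read back; a sketch with the same client (the 7.x client wraps the response payload in body):
const res = await this.db.clientDb.indices.getIndexTemplate({
  name: 'index-template-xxx2'
})
console.log(res.body.index_templates[0].index_template.version) // 3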

No result returned from the NEST C# Elasticsearch query

I am indexing an attachment field. The POST query in Sense returns the expected result set.
My query is
POST /mydocs/_search
{
"query" : {
"bool" : {
"must" : [
{ "match" : { "file.content":"abc"} },
{ "match":{"otherDetails":"asd"}},
{ "match" : { "filePermissionInfo.accountValue" : "xyz"} }
]
}
}
}
I need to convert it to C# NEST code. I tried converting it, but it's not returning any results, even though the index contains matching data. If I remove
m.Match(mt1 => mt1.Field(f1 => f1.File.Content).Query(queryTerm))
from the expression below, it returns a result set. Is there any problem with the attachment field?
client.Search<IndexDocument>(s => s
.Index("mydocs")
.Query(q => q
.Bool(b => b
.Must(m =>
m.Match(mt1 => mt1.Field(f1 => f1.File.Content).Query(queryTerm)) &&
m.Match(mt2 => mt2.Field(f2 => f2.FilePermissionInfo.First().SecurityIdValue).Query(accountName)) &&
m.Match(mt3 => mt3.Field(f3 => f3.OtherDetails).Query(other))
)))
);
My mapping is
{
"mydocs": {
"mappings": {
"indexdocument": {
"properties": {
"docLocation": {
"type": "string",
"index": "not_analyzed",
"store": true
},
"documentType": {
"type": "string",
"store": true
},
"file": {
"type": "attachment",
"fields": {
"content": {
"type": "string",
"term_vector": "with_positions_offsets",
"analyzer": "full"
},
"author": {
"type": "string"
},
"title": {
"type": "string",
"term_vector": "with_positions_offsets",
"analyzer": "full"
},
"name": {
"type": "string"
},
"date": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"keywords": {
"type": "string"
},
"content_type": {
"type": "string"
},
"content_length": {
"type": "integer"
},
"language": {
"type": "string"
}
}
},
"filePermissionInfo": {
"properties": {
"fileSystemRights": {
"type": "string",
"store": true
},
"securityIdValue": {
"type": "string",
"store": true
}
}
},
"id": {
"type": "double",
"store": true
},
"lastModifiedDate": {
"type": "date",
"store": true,
"format": "strict_date_optional_time||epoch_millis"
},
"otherDetails": {
"type": "string"
},
"title": {
"type": "string",
"store": true,
"term_vector": "with_positions_offsets"
}
}
}
}
}
}
It looks like the query hasn't been translated to NEST correctly: the two versions target different fields. The Sense query matches on
"filePermissionInfo.accountValue"
but the NEST query matches on
f2 => f2.FilePermissionInfo.First().SecurityIdValue
which serializes to filePermissionInfo.securityIdValue. Point the NEST expression at the same field the working Sense query uses:
f2 => f2.FilePermissionInfo.First().AccountValue

Elasticsearch query stops working with a large amount of data

The problem: I have two indexes that are identical in terms of settings and mappings.
The first index contains only 1 document.
The second index contains the same document + 16M of others.
When I run the query on the first index it returns the document, but the same query on the second index returns nothing.
Index settings:
{
"tasks_test": {
"settings": {
"index": {
"analysis": {
"analyzer": {
"tag_analyzer": {
"filter": [
"lowercase",
"tag_filter"
],
"tokenizer": "whitespace",
"type": "custom"
}
},
"filter": {
"tag_filter": {
"type": "word_delimiter",
"type_table": "# => ALPHA"
}
}
},
"creation_date": "1444127141035",
"number_of_replicas": "2",
"number_of_shards": "5",
"uuid": "wTe6WVtLRTq0XwmaLb7BLg",
"version": {
"created": "1050199"
}
}
}
}
}
Mappings:
{
"tasks_test": {
"mappings": {
"Task": {
"dynamic": "false",
"properties": {
"format": "dateOptionalTime",
"include_in_all": false,
"type": "date"
},
"is_private": {
"type": "boolean"
},
"last_timestamp": {
"type": "integer"
},
"name": {
"analyzer": "tag_analyzer",
"type": "string"
},
"project_id": {
"include_in_all": false,
"type": "integer"
},
"user_id": {
"include_in_all": false,
"type": "integer"
}
}
}
}
}
The document:
{
"_index": "tasks_test",
"_type": "Task",
"_id": "1",
"_source": {
"is_private": false,
"name": "135548- test with number",
"project_id": 2,
"user_id": 1
}
}
The query:
{
"query": {
"filtered": {
"query": {
"bool": {
"must": [
[
{
"match": {
"_all": {
"query": "135548",
"type": "phrase_prefix"
}
}
}
]
]
}
},
"filter": {
"bool": {
"must": [
{
"term": {
"is_private": false
}
},
{
"terms": {
"project_id": [
2
]
}
},
{
"terms": {
"user_id": [
1
]
}
}
]
}
}
}
}
}
Also, some findings:
if I replace _all with name, everything works
if I replace match_phrase_prefix with match_phrase, it works too
ES version: 1.5.1
So, the question is: how can I make the query work on the second index without the hacks mentioned above?
