I have the JSON schema below:
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "array",
"items": {
"type": "object",
"properties": {
"op": {
"type": "string",
"minLength": 1,
"enum": [
"add",
"remove",
"replace"
]
},
"path": {
"type": "string",
"minLength": 1,
"enum": [
"/name",
"/description",
"/prefix"
]
},
"value": {
"type": "string",
"minLength": 1
}
},
"additionalProperties": false,
"required": [
"op",
"path",
"value"
],
"minItems": 1,
"allOf": [
{
"if" : {
"properties": {
"path" : {
"const": "/name"
}
}
},
"then": {
"properties": {
"op": {
"const": "replace"
}
}
}
},
{
"if" : {
"properties": {
"path" : {
"const": "/description"
}
}
},
"then": {
"properties": {
"op": {
"const": "replace"
}
}
}
}
]
}
}
As you can see above, if the path is /name or /description, then op must be replace, while for the /prefix path all operations (add, remove, and replace) are allowed. But I want a special condition applied to the remove operation:
if the path is /prefix and op is remove, then the required properties should not include the value attribute, so only op and path.
I think you'll want to turn the logic of that around. Do not include "value" in the "required" list. Instead, add a condition that says, basically, "if op is not 'remove', then value is required".
{
"if" : {
"not": {
"properties": {
"op" : {
"const": "remove"
}
}
}
},
"then": {
"required": ["value"]
}
}
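Putting it together, here is a minimal sketch of the amended schema, checked with Ajv (the validator is my choice; any draft-07 validator works). "value" is dropped from "required" and the third allOf branch demands it whenever op is not "remove". Note that minItems only has an effect when placed on the array itself, not inside "items".
// Minimal sketch: amended schema validated with Ajv (assumed validator).
const Ajv = require("ajv");
const ajv = new Ajv();

const schema = {
  $schema: "http://json-schema.org/draft-07/schema#",
  type: "array",
  minItems: 1, // array-level keyword, so it lives here rather than inside "items"
  items: {
    type: "object",
    additionalProperties: false,
    required: ["op", "path"], // "value" is no longer always required
    properties: {
      op: { type: "string", minLength: 1, enum: ["add", "remove", "replace"] },
      path: { type: "string", minLength: 1, enum: ["/name", "/description", "/prefix"] },
      value: { type: "string", minLength: 1 }
    },
    allOf: [
      { if: { properties: { path: { const: "/name" } } },
        then: { properties: { op: { const: "replace" } } } },
      { if: { properties: { path: { const: "/description" } } },
        then: { properties: { op: { const: "replace" } } } },
      // the new condition: anything other than "remove" must carry a value
      { if: { not: { properties: { op: { const: "remove" } } } },
        then: { required: ["value"] } }
    ]
  }
};

const validate = ajv.compile(schema);
console.log(validate([{ op: "remove", path: "/prefix" }]));               // true: no value needed
console.log(validate([{ op: "add", path: "/prefix" }]), validate.errors); // false: value is required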
Hey, I have a problem with my MongoDB Atlas Search. I need to add one more field to my search index for better results, but since it's not in the mappings, whenever I perform my search as follows it returns nothing.
embeddedDocument: {
path: 'produces',
operator: {
compound: {
"must": [
{
autocomplete: {
"path": "produces.name",
"query": val,
"fuzzy": {
"maxEdits": 2,
"prefixLength": 3
}
}
},
{
equals: { path: 'produces.deleted', value: false }
}
],
}
}
}
And here is my index definition:
{
"mappings": {
"dynamic": false,
"fields": {
"companyName": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
],
"produces": {
"type": "embeddedDocuments",
"fields": {
"name": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
]
}
},
"status": {
"type": "string"
}
}
}
}
So I need to update my index definition as follows:
{
"mappings": {
"dynamic": false,
"fields": {
"companyName": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
],
"produces": {
"type": "embeddedDocuments",
"fields": {
"deleted": {
"type": "boolean"
},
"name": [
{
"analyzer": "lucene.standard",
"type": "string"
},
{
"foldDiacritics": true,
"maxGrams": 6,
"minGrams": 2,
"tokenization": "edgeGram",
"type": "autocomplete"
}
]
}
},
"status": {
"type": "string"
}
}
}
}
But my problem is that I don't know where to apply this index definition on the MongoDB side so I can filter with the deleted: false criterion.
I tried adding an index to the produces collection in MongoDB, but it doesn't work and I still get an empty result.
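For context, here is a minimal sketch (mongosh syntax) of where that embeddedDocument fragment sits inside the full $search stage once the deleted field is mapped. The collection name ("companies"), the index name ("default"), and the search term are assumptions; substitute your own.
// Sketch: full aggregation pipeline around the embeddedDocument operator above.
const val = "tomato"; // hypothetical search term

db.companies.aggregate([
  {
    $search: {
      index: "default", // the Atlas Search index whose mapping is shown above
      embeddedDocument: {
        path: "produces",
        operator: {
          compound: {
            must: [
              {
                autocomplete: {
                  path: "produces.name",
                  query: val,
                  fuzzy: { maxEdits: 2, prefixLength: 3 }
                }
              },
              // this clause can only match once "produces.deleted" is mapped
              // as a boolean in the search index definition
              { equals: { path: "produces.deleted", value: false } }
            ]
          }
        }
      }
    }
  }
])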
I'm using JSON descriptors instead of the .proto format. Everything works except the array of Todo items. I need an array of Todos.
How do I define that? I put "type": "array", but it always returns the error:
'Error: no such Type or Enum 'array' in Type .Todos'
My JSON file looks like this:
const todo = {
"nested": {
"Services": {
"methods": {
"createTodo": {
"requestType": "Todo",
"requestStream": false,
"responseType": "Todo",
"responseStream": false
},
"readTodos": {
"requestType": "voidNoParam",
"requestStream": false,
"responseType": "Todos",
"responseStream": false
},
"readTodosStream": {
"requestType": "voidNoParam",
"requestStream": false,
"responseType": "Todo",
"responseStream": true
}
}
},
"Todo": {
"fields": {
"id": {
"type": "int32",
"id": 1
},
"text": {
"type": "string",
"id": 2
}
}
},
"Todos": {
"fields": {
"items": {
"type": "array",
"id": 1
}
}
},
"voidNoParam": {
"fields": {}
}
}
}
module.exports = todo
I found the problem, and it's really simple:
"Todos": {
"fields": {
"items": {
"rule": "repeated",
"type": "Todo",
"id": 1
}
}
},
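A minimal sketch, assuming the descriptor is loaded with protobuf.js (the "no such Type or Enum" error message suggests that library) and that the module path is "./todo", showing that the repeated field round-trips once the fix above is applied.
// Sketch: load the JSON descriptor and round-trip a Todos message.
const protobuf = require("protobufjs");
const todo = require("./todo"); // the JSON descriptor shown above (assumed path)

const root = protobuf.Root.fromJSON(todo);
const Todos = root.lookupType("Todos");

const message = Todos.create({
  items: [{ id: 1, text: "write docs" }, { id: 2, text: "ship it" }]
});

const buffer = Todos.encode(message).finish(); // serialize to a Uint8Array
const decoded = Todos.decode(buffer);          // parse it back
console.log(decoded.items.length);             // -> 2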
I would like to generate a schema from a JSON object.
var GenerateSchema = require('generate-schema')
var schema = GenerateSchema.json(request.body);
Request.Body
{
"type": "FeatureCollection",
"features": [
{
"type": "Feature",
"geometry": {
"type": "Point",
"coordinates": [
102,
0.5
]
},
"properties": {
"prop0": "value0"
}
},
{
"type": "Feature",
"geometry": {
"type": "LineString",
"coordinates": [
[
102,
0
],
[
103,
1
],
[
104,
0
],
[
105,
1
]
]
},
"properties": {
"prop0": "value0",
"prop1": 0
}
},
{
"type": "Feature",
"geometry": {
"type": "Polygon",
"coordinates": [
[
[
100,
0
],
[
101,
0
],
[
101,
1
],
[
100,
1
],
[
100,
0
]
]
]
},
"properties": {
"prop0": "value0",
"prop1": {
"this": "that"
}
}
}
]
}
Schema generated from the request.body
{
"$id": "http://json-schema.org/draft-04/schema#",
"$schema": "http://json-schema.org/draft-04/schema#",
"title": "Product",
"type": "object",
"properties": {
"type": {
"type": "string"
},
"features": {
"type": "array",
"items": {
"type": "object",
"properties": {
"type": {
"type": "string"
},
"geometry": {
"type": "object",
"properties": {
"type": {
"type": "string"
},
"coordinates": {
"type": "array",
"items": {
"oneOf": [
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
}
],
"type": "array"
}
}
}
},
"properties": {
"type": "object",
"properties": {
"prop0": {
"type": "string"
},
"prop1": {
"type": "object",
"properties": {
"this": {
"type": "string"
}
}
}
}
}
},
"required": [
"type",
"geometry",
"properties"
]
}
}
}
}
Schema validation with Ajv
[
{
keyword: 'type',
dataPath: '.features[0].geometry.coordinates[0]',
schemaPath: '#/properties/features/items/properties/geometry/properties/coordinates/items/type',
params: { type: 'array' },
message: 'should be array'
}
]
Why does Ajv detect an issue?
Assuming that you want coordinates to be either an array of numbers or an array of arrays of numbers, this schema doesn't look right to me:
"coordinates": {
"type": "array",
"items": {
"oneOf": [
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
},
{
"type": "number"
}
],
"type": "array"
}
}
You would typically use oneOf to choose between different schemas, but the oneOf here says:
it can be a number
or a number
or a number
or a number
or a number
Ultimately, though, that doesn't matter, because the {"type": "array"} alongside it requires every item to be an array as well, which explains why it fails for {"coordinates": [102, 0.5]}: 102 and 0.5 are not arrays.
It seems that what you're looking for is more along the lines of: (untested)
"coordinates": {
"type": "array",
"items": {
"oneOf": [
{ "type": "number" },
{ "type": "array", "items": { "type": "number"} }
]
}
}
Which reads:
items in a coordinates array can be either "numbers" or "arrays of numbers".
I think that your generate-schema package got this wrong here.
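As a quick sanity check, the corrected items schema can be compiled with Ajv directly; this small sketch only compiles the coordinates fragment from the fix above.
// Sketch: validate sample coordinates against the corrected fragment.
const Ajv = require("ajv");
const ajv = new Ajv();

const coordinatesSchema = {
  type: "array",
  items: {
    oneOf: [
      { type: "number" },
      { type: "array", items: { type: "number" } }
    ]
  }
};

const validate = ajv.compile(coordinatesSchema);
console.log(validate([102, 0.5]));              // true: Point-style coordinates
console.log(validate([[102, 0], [103, 1]]));    // true: LineString-style coordinates
console.log(validate("oops"), validate.errors); // false, with error details
Note that Polygon coordinates, which are nested one level deeper, would still need another oneOf branch.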
I found my answer with the QuickType library.
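For reference, a rough sketch of how that might look with quicktype-core, based on quicktype's documented Node API; the type name "FeatureCollection", the "schema" target, and the requestBody variable are assumptions for illustration.
// Sketch: derive a JSON Schema from a sample body with quicktype-core (assumed usage).
const { quicktype, InputData, jsonInputForTargetLanguage } = require("quicktype-core");

async function schemaFromSample(sample) {
  const jsonInput = jsonInputForTargetLanguage("schema"); // JSON Schema target
  await jsonInput.addSource({ name: "FeatureCollection", samples: [JSON.stringify(sample)] });

  const inputData = new InputData();
  inputData.addInput(jsonInput);

  const { lines } = await quicktype({ inputData, lang: "schema" });
  return lines.join("\n");
}

schemaFromSample(requestBody).then(console.log); // requestBody = the GeoJSON body above (hypothetical variable)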
I am indexing an attachment field. The POST query in Sense returns the expected result set.
My query is:
POST /mydocs/_search
{
"query" : {
"bool" : {
"must" : [
{ "match" : { "file.content":"abc"} },
{ "match":{"otherDetails":"asd"}},
{ "match" : { "filePermissionInfo.accountValue" : "xyz"} }
]
}
}
}
I need to convert it to C# NEST code. I tried converting it, but it's not returning any results even though the index contains data. If I remove
m.Match(mt1 => mt1.Field(f1 => f1.File.Coontent).Query(queryTerm))
from the expression below, it returns a result set. Is there a problem with the attachment field?
client.Search<IndexDocument>(s => s
.Index("mydocs")
.Query(q => q
.Bool(b => b
.Must(m =>
m.Match(mt1 => mt1.Field(f1 => f1.File.Coontent).Query(queryTerm)) &&
m.Match(mt2 => mt2.Field(f2 => f2.FilePermissionInfo.First().SecurityIdValue).Query(accountName)) &&
m.Match(mt3 => mt3.Field(f3 => f3.OtherDetails).Query(other))
)))
);
My mapping is
{
"mydocs": {
"mappings": {
"indexdocument": {
"properties": {
"docLocation": {
"type": "string",
"index": "not_analyzed",
"store": true
},
"documentType": {
"type": "string",
"store": true
},
"file": {
"type": "attachment",
"fields": {
"content": {
"type": "string",
"term_vector": "with_positions_offsets",
"analyzer": "full"
},
"author": {
"type": "string"
},
"title": {
"type": "string",
"term_vector": "with_positions_offsets",
"analyzer": "full"
},
"name": {
"type": "string"
},
"date": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"keywords": {
"type": "string"
},
"content_type": {
"type": "string"
},
"content_length": {
"type": "integer"
},
"language": {
"type": "string"
}
}
},
"filePermissionInfo": {
"properties": {
"fileSystemRights": {
"type": "string",
"store": true
},
"securityIdValue": {
"type": "string",
"store": true
}
}
},
"id": {
"type": "double",
"store": true
},
"lastModifiedDate": {
"type": "date",
"store": true,
"format": "strict_date_optional_time||epoch_millis"
},
"otherDetails": {
"type": "string"
},
"title": {
"type": "string",
"store": true,
"term_vector": "with_positions_offsets"
}
}
}
}
}
}
It looks like the query hasn't been translated to NEST correctly. In the query you have
"filePermissionInfo.accountValue"
but in the NEST query you have
f2 => f2.FilePermissionInfo.First().SecurityIdValue
which resolves to the field filePermissionInfo.securityIdValue instead. You need to change this to
f2 => f2.FilePermissionInfo.First().AccountValue
The problem: I have two indexes that are identical in terms of settings and mappings.
The first index contains only one document.
The second index contains the same document plus 16M others.
When I run the query on the first index it returns the document, but when I run the same query on the second one, I get nothing.
Index settings:
{
"tasks_test": {
"settings": {
"index": {
"analysis": {
"analyzer": {
"tag_analyzer": {
"filter": [
"lowercase",
"tag_filter"
],
"tokenizer": "whitespace",
"type": "custom"
}
},
"filter": {
"tag_filter": {
"type": "word_delimiter",
"type_table": "# => ALPHA"
}
}
},
"creation_date": "1444127141035",
"number_of_replicas": "2",
"number_of_shards": "5",
"uuid": "wTe6WVtLRTq0XwmaLb7BLg",
"version": {
"created": "1050199"
}
}
}
}
}
Mappings:
{
"tasks_test": {
"mappings": {
"Task": {
"dynamic": "false",
"properties": {
"format": "dateOptionalTime",
"include_in_all": false,
"type": "date"
},
"is_private": {
"type": "boolean"
},
"last_timestamp": {
"type": "integer"
},
"name": {
"analyzer": "tag_analyzer",
"type": "string"
},
"project_id": {
"include_in_all": false,
"type": "integer"
},
"user_id": {
"include_in_all": false,
"type": "integer"
}
}
}
}
}
The document:
{
"_index": "tasks_test",
"_type": "Task",
"_id": "1",
"_source": {
"is_private": false,
"name": "135548- test with number",
"project_id": 2,
"user_id": 1
}
}
The query:
{
"query": {
"filtered": {
"query": {
"bool": {
"must": [
[
{
"match": {
"_all": {
"query": "135548",
"type": "phrase_prefix"
}
}
}
]
]
}
},
"filter": {
"bool": {
"must": [
{
"term": {
"is_private": false
}
},
{
"terms": {
"project_id": [
2
]
}
},
{
"terms": {
"user_id": [
1
]
}
}
]
}
}
}
}
}
Also, some findings:
if I replace _all with name, everything works
if I replace match_phrase_prefix with match_phrase, it also works
ES version: 1.5.1
So, the question is: how do I make the query work on the second index without the workarounds mentioned above?