NodeJS - Elasticsearch: insert data into an index

I followed this link to get the mapping of the Elasticsearch index and would like to insert data (not including all fields) into it as below, but it failed.
Mapping of the index:
{
  "mymapping": {
    "mappings": {
      "_meta": {
        "beat": "apm",
        "version": "7.5.0"
      },
      "dynamic_templates": [
        {
          "labels": {
            "path_match": "labels.*",
            "match_mapping_type": "string",
            "mapping": {
              "type": "keyword"
            }
          }
        }
      ],
      "properties": {
        "@timestamp": {
          "type": "date"
        },
        "people": {
          "dynamic": "false",
          "properties": {
            "id": {
              "type": "keyword",
              "ignore_above": 1024
            },
            "name": {
              "type": "keyword",
              "ignore_above": 1024
            }
          }
        }
      }
    }
  }
}
I prepared client.js without any problem; here is InsertData.js:
const esClient = require('./client');

const insertDoc = async function (indexName, _id, mappingType, data) {
  return await esClient.index({
    index: indexName,
    type: mappingType,
    id: _id,
    body: data
  });
};

module.exports = insertDoc;

async function test() {
  const data = {
    beat: 'apm',
    version: '7.5.0'
  };
  try {
    const resp = await insertDoc('mymapping', 2, '_meta', data);
    console.log(resp);
  } catch (e) {
    console.log(e);
  }
}

test();
When I tried to insert the data, there was an exception.
Error output:
message: '[illegal_argument_exception] Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]',
path: '/mymapping/_doc/2',
query: { type: '_meta' },
body: {
  error: {
    root_cause: [Array],
    type: 'illegal_argument_exception',
    reason: 'Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]'
  },
  status: 400
},
statusCode: 400,
response: '{
  "error": {
    "root_cause": [
      {
        "type": "illegal_argument_exception",
        "reason": "Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]"
      }
    ],
    "type": "illegal_argument_exception",
    "reason": "Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]"
  },
  "status": 400
}'
How can I insert the data properly?

Starting with Elasticsearch 6+, multiple mapping types have been deprecated. You are explicitly passing a type (_meta) when indexing the document, while the type used by default is _doc, so the final mapping would end up with more than one type.
See this for more details: Removal of types
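As a rough sketch, assuming Elasticsearch 7.x and the official JavaScript client, you can simply drop the type parameter so the document is indexed under the default _doc type (the payload here is just an example matching the people mapping above):

const esClient = require('./client');

// Sketch: index without an explicit type so the default _doc is used.
async function insertDoc(indexName, _id, data) {
  return esClient.index({
    index: indexName,
    id: _id,
    body: data
  });
}

// Example payload only; use whichever fields you actually want to set.
insertDoc('mymapping', 2, { people: { id: '1', name: 'John' } })
  .then(resp => console.log(resp))
  .catch(err => console.log(err));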

Unable to retrieve ordered job list from Google Transcoder API

I'm using the Node.js client library of the Google Transcoder API. I'm able to retrieve a paginated list of jobs, but I'm not able to order the elements by start date. Here is my code:
const { TranscoderServiceClient } = require('@google-cloud/video-transcoder').v1;

class TranscoderApiController {
  constructor() {
    this.projectId = process.env.GOOGLE_CLOUD_PROJECT;
    this.location = process.env.TASK_LOCATION;
  }

  async getEntries(req, res, next) {
    const params = {
      pageSize: req.query.pageSize ? parseInt(req.query.pageSize) : 10,
      pageToken: req.query.pageToken,
      filter: req.query.filter,
      orderBy: req.query.orderBy
    };
    const client = new TranscoderServiceClient();
    const result = await client.listJobs({
      parent: client.locationPath(this.projectId, this.location),
      pageSize: params.pageSize,
      orderBy: 'createTime.seconds'
    }, {
      autoPaginate: false
    });
    if (result.length == 3 && result[2] != undefined) {
      return result[2];
    } else {
      return result[1];
    }
  }
}

module.exports = new TranscoderApiController();
When I call the getEntries method I receive the following error:
"3 INVALID_ARGUMENT: The request was invalid: sort order \"createTime.seconds\" is unsupported"
If I remove the orderBy: 'createTime.seconds' line, the API works but the results are not ordered as I want. The result is something like this (I have abbreviated the JSON):
{
  "jobs": [
    {
      "labels": {},
      "name": "projects/<id>/locations/europe-west1/jobs/<uuid>",
      "inputUri": "",
      "outputUri": "",
      "state": "SUCCEEDED",
      "createTime": {
        "seconds": "1656602896",
        "nanos": 386772728
      },
      "startTime": {
        "seconds": "1656602900",
        "nanos": 755000000
      },
      "endTime": {
        "seconds": "1656603062",
        "nanos": 428000000
      },
      "ttlAfterCompletionDays": 30,
      "error": null,
      "config": {
        "inputs": [
          {
            "key": "input0",
            "uri": "gs://<url>/render_md.mp4",
            "preprocessingConfig": null
          }
        ],
        "editList": [...],
        "elementaryStreams": [...],
        "muxStreams": [...],
        "manifests": [],
        "adBreaks": [],
        "spriteSheets": [],
        "overlays": [],
        "output": {
          "uri": "gs://<url>/md.mp4/"
        },
        "pubsubDestination": {
          "topic": "projects/<id>/topics/transcoder_api"
        }
      },
      "jobConfig": "config"
    },
    ...
  ],
  "unreachable": [],
  "nextPageToken": "Co8BCjgKDGV1cm9wZS13ZXN0MRIZdHJhbnNjb2Rlci5nb29nbGVhcGlzLmNvbRgBII..."
}
As you can see, each job has the startTime.seconds property. I followed the syntax described here:
https://google.aip.dev/132#ordering
Any help solving the ordering issue is appreciated.
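For what it's worth, the AIP-132 convention that page describes is a comma-separated list of field names with an optional desc suffix, not sub-field paths like createTime.seconds. A hedged sketch of what such a request might look like (whether listJobs in the Transcoder API actually honours orderBy is not confirmed here):

// Sketch only: AIP-132-style ordering ("field desc"), assuming listJobs accepts orderBy.
const result = await client.listJobs({
  parent: client.locationPath(this.projectId, this.location),
  pageSize: params.pageSize,
  orderBy: 'createTime desc'
}, {
  autoPaginate: false
});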

Remove object from nested array in MongoDB using NodeJS

I can see that this question should have been answered here, but the code simply doesn't work for me (I have tried multiple, similar variations).
Here is my data:
[{
  "_id": {
    "$oid": "628cadf43a2fd997be8ce242"
  },
  "dcm": 2,
  "status": true,
  "comments": [
    {
      "id": 289733,
      "dcm": 2,
      "status": true,
      "clock": "158",
      "user": "Nathan Field",
      "dept": "IT",
      "department": [],
      "dueback": "",
      "comment": "test 1"
    },
    {
      "id": 289733,
      "dcm": 2,
      "status": true,
      "clock": "158",
      "user": "Nathan Field",
      "dept": "IT",
      "department": [],
      "dueback": "",
      "comment": "test 2"
    }
  ],
  "department": [],
  "dueback": ""
}]
And here is my code
const deleteResult = await db.collection('status').updateOne(
  { "dcm": comments.dcm },
  { $pull: { "comments": { "id": comments.id } } },
  { upsert: false },
  { multi: true }
);
Absolutely nothing happens...
So the issue ended up being something to do with running multiple update operations within one function. I have a database connection function like this:
const withDB = async (operations, res) => {
  try {
    const client = await MongoClient.connect('mongodb://localhost:27017', { useNewUrlParser: true });
    const db = client.db('collection');
    await operations(db);
    client.close();
  } catch (error) {
    res.status(500).json({ message: 'Error connecting to db', error });
  }
}
And then I call this by using:
withDB(async (db) => {
  await db.collection('status').updateMany(
    { "dcm": comments.dcm },
    { $pull: { "comments": { "id": comments.id } } },
    { multi: true }
  );
});
It would seem the issue occurred because I had two of these update operations within one withDB call. I have multiple operations in other instances (update an item, then fetch the collection), but for some reason this caused an issue.
I created a separate call to the withDB function to perform the '$pull' (delete) request, and then updated the array with the new comments.
To check that there was nothing wrong with my actual query, I used Studio3T's IntelliShell feature. If I'd done that sooner I would have saved myself a lot of time!
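A minimal sketch of that separation, reusing the withDB helper above (comments.newComment here is just a placeholder for whatever the replacement comment data actually is):

// First withDB call: pull the old comment out of the embedded array.
await withDB(async (db) => {
  await db.collection('status').updateMany(
    { "dcm": comments.dcm },
    { $pull: { "comments": { "id": comments.id } } }
  );
});

// Second, separate withDB call: push the updated comment back in.
await withDB(async (db) => {
  await db.collection('status').updateOne(
    { "dcm": comments.dcm },
    { $push: { "comments": comments.newComment } } // placeholder for the new comment object
  );
});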

elasticsearch node.js API remove an object from an array on a document using painless script results in array Index Out of Bounds

I want to remove items (objects) from an array on a document in Elasticsearch; however, whenever I try to run my update script using Painless, I receive an array index out of bounds exception.
I'm using the JavaScript elasticsearch npm package to search Elasticsearch for the relevant documents, which returns data like:
"_index": "centres",
"_type": "doc",
"_id": "51bc77d1-b514-4f4e-85fa-412def6829f5",
"_score": 1,
"_source": {
"id": "cbaa7daa-f1a2-4ac3-8d7c-fc981245d21c",
"name": "Five House",
"openDays": [
{
"title": "new open Day",
"endDate": "2022-03-22T00:00:00.000Z",
"id": "82be934b-eeb1-419c-96ed-a58808b30df7"
},
{
"title": "last open Day",
"endDate": "2020-12-24T00:00:00.000Z",
"id": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
}
]
}
I then want to go through and remove certain items from the openDays array. I've created an array of the items I want to remove, so for the above example:
[
  {
    id: '51bc77d1-b514-4f4e-85fa-412def6829f5',
    indexes: [
      {
        "title": "last open Day",
        "endDate": "2020-12-24T00:00:00.000Z",
        "id": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
      }
    ]
  }
]
I'm then trying to run an update via the elasticsearch node client like this:
for (const centre of updates) {
  if (centre.indexes.length) {
    await Promise.all(centre.indexes.map(async (theIndex) => {
      const updated = await client.update({
        index: 'centres',
        type: 'doc',
        id: centre.id,
        body: {
          script: {
            lang: 'painless',
            source: "ctx._source.openDays.remove(ctx._source.openDays.indexOf('openDayID'))",
            params: {
              "openDayID": theIndex.id
            }
          }
        }
      }).catch((err) => { throw err; });
    }))
    .catch((err) => { throw err; });
    await client.indices.refresh({ index: 'centres' }).catch((err) => { throw err; });
  }
}
When I run this though, it returns a 400 with an "array_index_out_of_bounds_exception" error:
-> POST http://localhost:9200/centres/doc/51bc77d1-b514-4f4e-85fa-412def6829f5/_update
{
  "script": {
    "lang": "painless",
    "source": "ctx._source.openDays.remove(ctx._source.openDays.indexOf(\u0027openDayID\u0027))",
    "params": {
      "openDayID": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
    }
  }
}
<- 400
{
  "error": {
    "root_cause": [
      {
        "type": "remote_transport_exception",
        "reason": "[oSsa7mn][172.17.0.2:9300][indices:data/write/update[s]]"
      }
    ],
    "type": "illegal_argument_exception",
    "reason": "failed to execute script",
    "caused_by": {
      "type": "script_exception",
      "reason": "runtime error",
      "script_stack": [],
      "script": "ctx._source.openDays.remove(ctx._source.openDays.indexOf(\u0027openDayID\u0027))",
      "lang": "painless",
      "caused_by": {
        "type": "array_index_out_of_bounds_exception",
        "reason": null
      }
    }
  },
  "status": 400
}
I'm not quite sure where I'm going wrong with this. Am I using indexOf in the Painless script correctly? Does indexOf allow searching for properties on objects in arrays?
I stumbled across this question and answer: Elasticsearch: Get object index with Painless script
The body of the update script needs changing like so:
Promise.all(...
  const inline = `
    def openDayID = '${theIndex.id}';
    def openDays = ctx._source.openDays;
    def openDayIndex = -1;
    for (int i = 0; i < openDays.length; i++)
    {
      if (openDays[i].id == openDayID)
      {
        openDayIndex = i;
      }
    }
    if (openDayIndex != -1) {
      ctx._source.openDays.remove(openDayIndex);
    }
  `;

  const updated = await client.update({
    index: 'centres',
    type: 'doc',
    id: centre.id,
    body: {
      script: {
        lang: 'painless',
        inline: inline,
      },
    }
  }).catch((err) => { throw err; });

  await client.indices.refresh({ index: 'centres' }).catch((err) => { throw err; });
})).catch(... // end of Promise.all
I am not au fait with Painless scripting, so there are most likely better ways of writing this, e.g. breaking out of the loop once the index of the ID is found.
I have also had to move the refresh statement into the Promise.all, since if you're trying to remove more than one item from the array of objects, you'll be changing the document and changing the index. There is probably a better way of dealing with this too.
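For example, a variant of the loop above that stops as soon as the matching entry is found might look like this (a sketch under the same assumptions as the script above; _source arrays behave as Lists in Painless, hence size()):

// Sketch: same lookup, but break out of the loop once the openDay is found.
const inline = `
  def openDays = ctx._source.openDays;
  def openDayIndex = -1;
  for (int i = 0; i < openDays.size(); i++) {
    if (openDays[i].id == '${theIndex.id}') {
      openDayIndex = i;
      break;
    }
  }
  if (openDayIndex != -1) {
    ctx._source.openDays.remove(openDayIndex);
  }
`;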
In the original script, 'openDayID' is a string literal; it should be params.openDayID.
Better still, use removeIf:
"ctx._source.openDays.removeIf(el -> (el.id == params.openDayID))"

How to show data in React Table with structure {_id: "xx", timestamp: "xx", message: "{\"temperature\":22,\"humi\":45}"}?

React-Table
I have made an axios.get request to the back-end, which in turn returns a large data set from MongoDB. The structure of the data returned is:
[
1: {_id: "5dd3be2ecf55e1ec388f502b", timestamp: 1574157870567, message: "{"temperature":58,"humidity":59,"pressure":"1 bar"}"}
2: {_id: "5dd3be2ecf55e1ec388f502a", timestamp: 1574157870067, message: "{"temperature":78,"humidity":79,"pressure":"1 bar"}"}
...
]
I want to show it in react-table. The id and timestamp are displayed, but the temperature and the other variables are not. The message is a string. How can I parse this data at the back-end to convert message into an object?
Back-end code
router.get('/viewData', async (req, res) => {
  collection.find({}, { _id: 0, timestamp: 0 }).sort({ timestamp: -1 }).limit(400).toArray(function (err, resultantData) {
    if (err)
      throw err;
    // var storedDataArray;
    // var gotData = [];
    // var index = 0;
    // storedDataArray = resultantData;
    // console.log(storedDataArray)
    // storedDataArray.forEach(element => {
    //   gotData[index] = JSON.parse(element);
    //   console.log(gotData[index])
    //   index++;
    // })
    // console.log(gotData.length);
    res.status(200).json(resultantData);
  });
});
Is there any way to show temperature and other quantities in react table?
React-Table
class deviceData extends Component {
  constructor(props) {
    super(props)
    this.state = {
      dataList: [],
      data: ' '
    };
  }

  componentDidMount() {
    const url = "http://localhost:5000/api/data/viewData";
    fetch(url, {
      method: "GET"
    }).then(response => response.json()).then(result => {
      console.log(result);
      this.setState({
        dataList: result,
      });
    });
  }

  render() {
    const columns = [
      {
        Header: "Message ID",
        accessor: "_id",
        sortable: true,
        filterable: false,
        style: {
          textAlign: "left"
        },
        width: 300,
        maxWidth: 100,
        minWidth: 100,
      },
      {
        Header: "Time Stamp",
        accessor: "timestamp",
        width: 300,
        maxWidth: 100,
        minWidth: 100,
      },
      {
        Header: "Temperature",
        id: 'temperature',
        filterable: false,
        accessor: 'temperature'
      },
      {
        Header: "Pressure",
        id: 'pressure',
        filterable: false,
        accessor: 'pressure'
      },
      {
        Header: "Humidity",
        id: 'humidity',
        filterable: false,
        accessor: 'humidity'
      },
    ]
    return (
      <div className="ReactTable">
        <ReactTable
          columns={columns}
          data={this.state.dataList}
          defaultPageSize={10}
          className="-striped -highlight"
        >
        </ReactTable>
        <div id={"#" + this.props.id}></div>
      </div>
    );
  }
}
Backend Response
[
  {
    "_id": "5dd3be2fcf55e1ec388f502c",
    "timestamp": 1574157871067,
    "message": "{\"temperature\":93,\"humidity\":94,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2ecf55e1ec388f502b",
    "timestamp": 1574157870567,
    "message": "{\"temperature\":58,\"humidity\":59,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2ecf55e1ec388f502a",
    "timestamp": 1574157870067,
    "message": "{\"temperature\":78,\"humidity\":79,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2dcf55e1ec388f5029",
    "timestamp": 1574157869567,
    "message": "{\"temperature\":88,\"humidity\":89,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2dcf55e1ec388f5028",
    "timestamp": 1574157869066,
    "message": "{\"temperature\":99,\"humidity\":100,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2ccf55e1ec388f5027",
    "timestamp": 1574157868567,
    "message": "{\"temperature\":38,\"humidity\":39,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2ccf55e1ec388f5026",
    "timestamp": 1574157868067,
    "message": "{\"temperature\":82,\"humidity\":83,\"pressure\":\"1 bar\"}"
  },
  {
    "_id": "5dd3be2bcf55e1ec388f5025",
    "timestamp": 1574157867566,
    "message": "{\"temperature\":76,\"humidity\":77,\"pressure\":\"1 bar\"}"
  }
]
Convert the string back to an object by using JSON.parse(), e.g.:
var object = JSON.parse(str);
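For instance, a minimal sketch of parsing each row's message as the response comes back, inside the componentDidMount fetch shown above:

// Sketch: parse the stringified message on each row before storing it in state.
fetch(url, { method: "GET" })
  .then(response => response.json())
  .then(result => {
    const parsed = result.map(row => ({
      ...row,
      message: JSON.parse(row.message) // "{\"temperature\":93,...}" -> { temperature: 93, ... }
    }));
    this.setState({ dataList: parsed });
  });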
The important thing is to define the columns with the correct accessors. Try this:
const columns = [
  {
    Header: "Id",
    accessor: "_id"
  },
  {
    Header: "Timestamp",
    accessor: "timestamp"
  },
  {
    Header: "Temperature",
    accessor: "message.temperature"
  },
  {
    Header: "Humidity",
    accessor: "message.humidity"
  },
  {
    Header: "Pressure",
    accessor: "message.pressure"
  }
];
And Use it in React-table like this:
<ReactTable
  data={loans} // Instead of loans, use variable where you store your response
  columns={columns}
  defaultPageSize={10}
  sortable={true}
/>
I think you can try adding headers to your fetch method:
fetch(url, {
  method: "GET",
  headers: {
    "Accept": "application/json",
    "Content-Type": "application/json",
  }
})
This will ensure your response is JSON.
N.B. Try your URL with Postman first, setting those headers, and see whether the result is JSON or not. If it is JSON, I believe this code will help you; if it is not JSON, change your back-end code to ensure it returns JSON, verifying again with Postman.

AWS CloudSearch Upload JSON: Value tag cannot be array or object

I am running a Lambda function (Node.js) to upload some documents to AWS CloudSearch. I keep getting the following error:
{
  "errorMessage": "{ [\"The value of tags cannot be a JSON array or object\"] }",
  "errorType": "DocumentServiceException",
  "stackTrace": [
    "Object.extractError (/var/task/node_modules/aws-sdk/lib/protocol/json.js:48:27)",
    "Request.extractError (/var/task/node_modules/aws-sdk/lib/protocol/rest_json.js:37:8)",
    "Request.callListeners (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:105:20)",
    "Request.emit (/var/task/node_modules/aws-sdk/lib/sequential_executor.js:77:10)",
    "Request.emit (/var/task/node_modules/aws-sdk/lib/request.js:678:14)",
    "Request.transition (/var/task/node_modules/aws-sdk/lib/request.js:22:10)",
    "AcceptorStateMachine.runTo (/var/task/node_modules/aws-sdk/lib/state_machine.js:14:12)",
    "/var/task/node_modules/aws-sdk/lib/state_machine.js:26:10",
    "Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:38:9)",
    "Request.<anonymous> (/var/task/node_modules/aws-sdk/lib/request.js:680:12)"
  ]
}
I have followed the document format of
var item = {
  type: 'add',
  id: key,
  fields: {
    userid: value.userId,
    storyid: value.storyId,
    description: value.description,
    title: value.title,
    type: 'xyz'
  }
}
This is the code I am using to upload the data
exports.handle = function (e, ctx, cb) {
  ctx.callbackWaitsForEmptyEventLoop = false;
  var documentsBatch = e.data;
  var params = {
    contentType: 'application/json',
    documents: JSON.stringify(documentsBatch)
  };
  var req = cloudsearchdomain.uploadDocuments(params, function (err, data) {
    if (err) {
      // an error occurred
      cb(err, null);
    } else {
      // successful response
    }
  });
  req.send();
}
My stringified data, when logged, looks similar to this:
[
  {
    "type": "add",
    "id": "FpgAxxxxKrM4utxosPy23--KhO6FgvxK",
    "fields": {
      "userid": "FpgARscKlxaxutxosPy23",
      "storyid": "-KhxbPpRP7REEK",
      "description": "xyz 🔥 🔥",
      "title": "umm",
      "type": "story"
    }
  },
  {
    "type": "add",
    "id": "FccccxosPy23--KiYbrrPjtJVk2bghO-W",
    "fields": {
      "userid": "FpgARfPy23",
      "storyid": "-KiYbrfggO-W",
      "description": "noo",
      "title": "lalaa out",
      "type": "story"
    }
  }
]
Can someone point me in the right direction?
The problem was with another JSON object in the batch which had an additional attribute outside of fields. Once I found and removed it, everything worked. There should be a linter for this, or the SDK should throw a better exception.
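A rough sketch of the kind of pre-upload check that answer is hinting at (the allowed top-level keys are an assumption based on the document format shown above, not an official CloudSearch schema):

// Sketch: flag batch items that have top-level keys other than type, id and fields,
// since an extra attribute like that was what caused the error here.
function findSuspectDocs(documentsBatch) {
  const allowedTopLevel = ['type', 'id', 'fields']; // assumption based on the format above
  return documentsBatch.filter(doc =>
    Object.keys(doc).some(key => !allowedTopLevel.includes(key))
  );
}

console.log(findSuspectDocs(documentsBatch)); // inspect offenders before calling uploadDocuments()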
