My code looks like this (copied from another project), but I don't know why the log level field is not found in the JSON. Here is the code in detail:
LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
Logger logger = context.getLogger("ROOT");
if (enabled == null || enabled) {
    if (logger.getAppender(APPENDER_NAME) == null) {
        String destination = host + ":" + port;
        try {
            DestinationParser.parse(destination, AbstractLogstashTcpSocketAppender.DEFAULT_PORT);
        } catch (RuntimeException e) {
            throw new IllegalArgumentException("Invalid host and port : " + destination);
        }
        LogstashTcpSocketAppender appender = new LogstashTcpSocketAppender();
        appender.setContext(context);
        appender.addDestination(destination);
        appender.setName(APPENDER_NAME);
        LogstashEncoder encoder = new LogstashEncoder();
        encoder.setCustomFields("{\"application_name\":\"" + applicationName + "\"}");
        encoder.setContext(context);
        appender.setEncoder(encoder);
        appender.start();
        encoder.start();
        logger.addAppender(appender);
        logger.setLevel(Level.ALL);
    }
}
I start ELK from https://github.com/Dreampie/docker-elk with docker-compose up -d --build, but the Logstash result does not contain the log level. Logstash uses codec => "json":
{
"_index": "logstash-2016.10.10",
"_type": "logs",
"_id": "AVetGEgU-dbBmx39fbyl",
"_score": null,
"_source": {
"message": "{\"#timestamp\":\"2016-10-10T13:33:24.998+08:00\",\"#version\":1,\"message\":\"Retrieving delivery for Consumer: tags=[{amq.ctag-qURSKFA3CagYtd7y9EDAFQ=springCloudBus.anonymous.QTc6psI8RbOJm5oNFurqHA}], channel=Cached Rabbit Channel: AMQChannel(amqp://acfun#192.168.60.201:5672/,1), conn: Proxy#8440641 Shared Rabbit Connection: SimpleConnection#62e8f5bf [delegate=amqp://acfun#192.168.60.201:5672/], acknowledgeMode=AUTO local queue size=0\",\"logger_name\":\"org.springframework.amqp.rabbit.listener.BlockingQueueConsumer\",\"thread_name\":\"springCloudBus.anonymous.QTc6psI8RbOJm5oNFurqHA-1\",\"level\":\"DEBUG\",\"level_value\":10000,\"LOG_LEVEL_PATTERN\":\"%5p\",\"level\":\"DEBUG\",\"application_name\":\"user-api-provider\"}",
"#version": "1",
"#timestamp": "2016-10-10T05:37:11.819Z",
"host": "192.168.60.228",
"port": 52196
},
"fields": {
"#timestamp": [
1476077831819
]
},
"sort": [
1476077831819
]
}
Yeah, I got it: the json codec does not extract the log level field (and some other fields); json_lines works fine:
codec => json_lines
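For reference, here is a minimal sketch of a Logstash tcp input using that codec (the port is hypothetical and has to match the destination configured on the LogstashTcpSocketAppender):

input {
  tcp {
    port => 5000
    codec => json_lines
  }
}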
I want to remove items (objects) from an array on a document in Elasticsearch. However, whenever I try to run my update script using Painless, I receive an Array Index Out of Bounds exception.
I'm using the JavaScript elasticsearch npm package to search Elasticsearch for the relevant documents, which returns me data like:
"_index": "centres",
"_type": "doc",
"_id": "51bc77d1-b514-4f4e-85fa-412def6829f5",
"_score": 1,
"_source": {
"id": "cbaa7daa-f1a2-4ac3-8d7c-fc981245d21c",
"name": "Five House",
"openDays": [
{
"title": "new open Day",
"endDate": "2022-03-22T00:00:00.000Z",
"id": "82be934b-eeb1-419c-96ed-a58808b30df7"
},
{
"title": "last open Day",
"endDate": "2020-12-24T00:00:00.000Z",
"id": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
}
]
}
}
I then want to go through and remove certain items from the openDays array. I've created an array of the items I want to remove, so for the above example:
[
{
id: '51bc77d1-b514-4f4e-85fa-412def6829f5',
indexes: [
{
"title": "last open Day",
"endDate": "2020-12-24T00:00:00.000Z",
"id": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
}
]
}
]
I'm then trying to run an update via the elasticsearch node client like this:
for (const centre of updates) {
if (centre.indexes.length) {
await Promise.all(centre.indexes.map(async (theIndex) => {
const updated = await client.update({
index: 'centres',
type: 'doc',
id: centre.id,
body: {
script: {
lang: 'painless',
source: "ctx._source.openDays.remove(ctx._source.openDays.indexOf('openDayID'))",
params: {
"openDayID": theIndex.id
}
}
}
}).catch((err) => {throw err;});
}))
.catch((err) => {throw err;});
await client.indices.refresh({ index: 'centres' }).catch((err) => { throw err;});
}
}
When I run this though, it returns a 400 with an "array_index_out_of_bounds_exception" error:
-> POST http://localhost:9200/centres/doc/51bc77d1-b514-4f4e-85fa-412def6829f5/_update
{
"script": {
"lang": "painless",
"source": "ctx._source.openDays.remove(ctx._source.openDays.indexOf(\u0027openDayID\u0027))",
"params": {
"openDayID": "8cc339b9-d2f8-4252-b68a-ed0a49cbfabd"
}
}
}
<- 400
{
"error": {
"root_cause": [
{
"type": "remote_transport_exception",
"reason": "[oSsa7mn][172.17.0.2:9300][indices:data/write/update[s]]"
}
],
"type": "illegal_argument_exception",
"reason": "failed to execute script",
"caused_by": {
"type": "script_exception",
"reason": "runtime error",
"script_stack": [],
"script": "ctx._source.openDays.remove(ctx._source.openDays.indexOf(\u0027openDayID\u0027))",
"lang": "painless",
"caused_by": {
"type": "array_index_out_of_bounds_exception",
"reason": null
}
}
},
"status": 400
}
I'm not quite sure where I'm going wrong with this. Am I using the indexOf painless script correctly? Does indexOf allow for the searching of properties on objects in arrays?
I stumbled across this question and answer: Elasticsearch: Get object index with Painless script
The body of the update script needs changing like so:
Promise.all(...
const inline = `
def openDayID = '${theIndex.id}';
def openDays = ctx._source.openDays;
def openDayIndex = -1;
for (int i = 0; i < openDays.length; i++)
{
if (openDays[i].id == openDayID)
{
openDayIndex = i;
}
}
if (openDayIndex != -1) {
ctx._source.openDays.remove(openDayIndex);
}
`;
const updated = await client.update({
index: 'centres',
type: 'doc',
id: centre.id,
body: {
script: {
lang: 'painless',
inline: inline,
},
}
}).catch((err) => {throw err;});
await client.indices.refresh({ index: 'centres' }).catch((err) => { throw err;});
})).catch(... //end of Promise.all
I am not au fait with painless scripting, so there are most likely better ways of writing this e.g. breaking once the index of the ID is found.
I have also had to move the refresh statement into the Promise.all since if you're trying to remove more than one item from the array of objects, you'll be changing the document and changing the index. There is probably a better way of dealing with this too.
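For example, here is a minimal (untested) variation of the inline script above that breaks out of the loop as soon as the matching ID is found:

const inline = `
  def openDayID = '${theIndex.id}';
  def openDays = ctx._source.openDays;
  def openDayIndex = -1;
  for (int i = 0; i < openDays.length; i++) {
    if (openDays[i].id == openDayID) {
      openDayIndex = i;
      break; // stop scanning once the matching entry is found
    }
  }
  if (openDayIndex != -1) {
    ctx._source.openDays.remove(openDayIndex);
  }
`;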
'openDayID' should be params.openDayID
And use removeIf:
"ctx._source.openDays.removeIf(el -> (el.id == params.openDayID))"
I followed this link to get the mapping of the Elasticsearch index and would like to insert data (not including all fields) into it as below, but it failed.
Mapping of index:
{
"mymapping": {
"mappings": {
"_meta": {
"beat": "apm",
"version": "7.5.0"
},
"dynamic_templates": [
{
"labels": {
"path_match": "labels.*",
"match_mapping_type": "string",
"mapping": {
"type": "keyword"
}
}
}
],
"properties": {
"#timestamp": {
"type": "date"
},
"people": {
"dynamic": "false",
"properties": {
"id": {
"type": "keyword",
"ignore_above": 1024
},
"name": {
"type": "keyword",
"ignore_above": 1024
}
}
}
}
}
}
}
I prepared client.js without any problem; here is InsertData.js:
const esClient = require('./client');
const insertDoc = async function(indexName, _id, mappingType, data){
return await esClient.index({
index: indexName,
type: mappingType,
id: _id,
body: data
});
}
module.exports = insertDoc;
async function test(){
const data = {
beat: 'apm',
version: '7.5.0'
}
try {
const resp = await insertDoc('mymapping', 2, '_meta', data);
console.log(resp);
} catch (e) {
console.log(e);
}
}
test();
When I tried to insert the data, there was an exception. Error output:
message:
'[illegal_argument_exception] Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]',
path: '/mymapping/_doc/2',
query: { type: '_meta' },
body:
{ error:
{ root_cause: [Array],
type: 'illegal_argument_exception',
reason:
'Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]' },
status: 400 },
statusCode: 400,
response:
'{
"error": {
"root_cause": [
{
"type": "illegal_argument_exception",
"reason": "Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]"
}
],
"type": "illegal_argument_exception",
"reason": "Rejecting mapping update to [mymapping] as the final mapping would have more than 1 type: [_doc, _meta]"
},
"status": 400
}'
How can I insert the data properly?
Starting with Elasticsearch 6+, multiple types have been deprecated. You are explicitly passing a type (_meta) while indexing, whereas the type that gets used by default is _doc, so the request is rejected because the index would end up with more than one type.
See this for more details: Removal of types
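A minimal sketch of the fix (assuming the client.js and index from the question, and that the data should go into the mapped people fields rather than _meta, which is mapping metadata, not a type): drop the explicit type, or pass '_doc' if your client version still requires one.

const esClient = require('./client');

// Hypothetical fix: let the document go into the index's single default type (_doc)
const insertDoc = async function (indexName, _id, data) {
  return await esClient.index({
    index: indexName,
    id: _id,          // no explicit `type`; older client versions may need type: '_doc'
    body: data
  });
};

insertDoc('mymapping', 2, { people: { id: '1', name: 'john' } })
  .then((resp) => console.log(resp))
  .catch((e) => console.log(e));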
I have Java code that fetches some records from ES. The code is as follows:
public Map<String, Object> getRiskDetailsFromEventIds(final String eventId) {
try {
String elasticBaseUrl = elasticsearchConfig.getEsUrl();
String esIndexName = elasticsearchConfig.getHitsIndex();
String endpointUrl = elasticBaseUrl + "/" + esIndexName + "/_search";
String queryTemplate = IOUtils.toString(ESRepositary.class.getClassLoader().getResourceAsStream(
"querytemplates/hits_search_event_es_query_template.json"), Charset.defaultCharset());
String query = String.format(queryTemplate, eventId);
return getResultsFromElastic(endpointUrl, query);
} catch (IOException ex) {
throw new UncheckedIOException(ex);
}
}
My hits_search_event_es_query_template.json looks something like this
{
"_source": [
"hitNumber","eventId","hitScore","severityLevel","priority","hitType.typeCode","targetOwner","hitType.search","recommendedAction","referralId"
],
"query": {
"bool": {
"must": [
{
"terms": {
"eventId": ["%s"]
}
}
]
}
},
"size" : "50"
}
Now, this takes a String. My eventId is: "A,B,C,D"
The String that I need to pass to the query template should be: "A","B","C","D"
Can someone help me break down my String this way, or help me modify my ES query to use what I already have?
Given a List of the eventIds, I am able to do the following:
String eventId = eventIds.stream().map(event -> "\""+event+"\"").collect(Collectors.joining(","));
where eventIds is the List
I have also changed my query as below:
{
"_source": [
"hitNumber","eventId","hitScore","severityLevel","priority","hitType.typeCode","targetOwner","hitType.search","recommendedAction","referralId"
],
"query": {
"bool": {
"must": [
{
"terms": {
"eventId": [%s]
}
}
]
}
},
"size" : "50"
}
This gives me the query I need.
I want to add a dynamic attribute to a pattern I am using with log4js.
I am using some custom pattern, something like this:
"%d{} %-6p[%thread] %c [%x{context}]: %m%n%r"
Context is the dynamic value that I want to set with some unique id generated for each user on the server side.
There is a way to add a dynamic value when creating the log4js configuration by using the "tokens" and "context" attributes, but in that case the values have to be set when the configuration is created.
Is there a way to add some dynamic attribute that is set when the actual message is written to the log and not during the config phase?
Right now I am doing something like this:
log4js.configure(
  {
    appenders: {
      "file": {
        "type": "file", "filename": "service.log", "maxLogSize": 102400, "backups": 5, "category": "com.nextinsurance",
        "layout": {
          "type": "pattern", "pattern": "%d{} %-6p[%thread] %c [%x{context}]: %m%n%r",
          "tokens": { context: function(logEvent) { return getContextFromData(logEvent) } }
        }
      },
      "console": { "type": "console" }
    },
    categories: { "default": { "appenders": ["file", "console"], "level": "info" } }
  }
);
But I want to inject this value when writing to the log, something like:
logger.info(Message, {context: context_value})
You can use the logEvent data property to fetch the context. The data property contains the array of args passed in the log event.
Here is the sample code:
var log4js = require("log4js");
log4js.configure({
appenders: {
out: {
type: 'stdout',
layout: {
type: 'pattern',
pattern: '[%d] [%p] [%c] [%x{context}] - %m%n',
tokens: {
context: function(logEvent) {
let returnVal = logEvent.data[1] ? logEvent.data[1].context : undefined;
if (returnVal) {
logEvent.data.pop();
} else {
returnVal = 'null'; // set any default value if context is not passed.
}
return returnVal;
}
}
}
}
},
categories: {
default: {
appenders: ['out'],
level: 'INFO'
}
}
});
log4js.level = 'info';
let logger = log4js.getLogger();
logger.info('Hello', { context: 'context_value'}); // prints [2019-09-13T16:50:48.818] [INFO] [default] [context_value] - Hello
logger.info('Hello'); // prints [2019-09-13T16:50:48.820] [INFO] [default] [null] - Hello
I have a Node.js server application with Express. I would like to log its activity into Elasticsearch and visualize the logs using Kibana.
What would be the right way to do that?
Should I write a log file of JSON lines and read it with Logstash?
I'd recommend log4js. It has a range of useful appenders, and logstash is one of them. It works over UDP.
Here is an example taken from the log4js site:
var log4js = require('log4js'); // in your own project, require the installed package rather than '../lib/log4js'
/*
Sample logstash config:
udp {
codec => json
port => 10001
queue_size => 2
workers => 2
type => myAppType
}
*/
log4js.configure({
"appenders": [
{
type: "console",
category: "myLogger"
},
{
"host": "127.0.0.1",
"port": 10001,
"type": "logstashUDP",
"logType": "myAppType", // Optional, defaults to 'category'
"fields": { // Optional, will be added to the 'fields' object in logstash
"field1": "value1",
"field2": "value2"
},
"layout": {
"type": "pattern",
"pattern": "%m"
},
"category": "myLogger"
}
]
});
var logger = log4js.getLogger("myLogger");
logger.info("Test log message %s", "arg1", "arg2");