Creating mapping in Elasticsearch with Fluentd - node.js

I'm trying to create a mapping in Elasticsearch through Fluentd, which my Node.js app connects to.
Elasticsearch mapping example:
PUT http://host:9200/test_mapping
{
  "mappings": {
    "properties": {
      "response_code": {
        "type": "text",
        "fielddata": true
      },
      "response_text": {
        "type": "text",
        "fielddata": true
      },
      "status": {
        "type": "boolean"
      },
      "ip": {
        "type": "ip"
      },
      "login": {
        "type": "text",
        "fielddata": true
      }
    }
  }
}
Fluentd configuration example:
<source>
  @type forward
  port 24225
</source>

<match mapping.doc>
  @type elasticsearch
  logstash_format "#{ENV['LOGSTASH_FORMAT']}"
  scheme "#{ENV['SCHEME']}"
  host "#{ENV['HOST']}"
  port "#{ENV['PORT']}"
  write_operation index
  index_name "#{ENV['INDEX_NAME']}"
  flush_interval "#{ENV['FLUSH_INTERVAL']}"
</match>
Sample code on Node.js:
// ...
require('dotenv').config();
const env = process.env;
const loggerfluentd = require('fluent-logger');

loggerfluentd.configure('mapping', {
  host: env.FLUENTD_HOST,
  port: Number.parseInt(env.FLUENTD_PORT),
  timeout: 3.0,
  reconnectInterval: 10000 // 10 sec
});

function EmitMapping(data) {
  loggerfluentd.emit(env.INDEX_NAME, data);
}

exports.EmitMapping = EmitMapping;
This configuration does not create a mapping; it simply adds new documents to Elasticsearch.
Is it possible to change the configuration so that calling EmitMapping() does not just add new documents (with automatically assigned data types in the mapping), but instead creates my own mapping with my own data types?
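One way to get explicit data types, assuming the mapping only has to exist before the first document arrives, is to create the index once from Node.js with the official @elastic/elasticsearch client and let Fluentd simply write into it. A minimal sketch reusing the same environment variables (the client instance and function name here are hypothetical, not part of the original setup):
// Sketch (an assumption, not part of the original setup): create the index
// with an explicit mapping once at startup; Fluentd then only writes
// documents into the pre-mapped index.
const { Client } = require('@elastic/elasticsearch');
const esClient = new Client({ node: `${env.SCHEME}://${env.HOST}:${env.PORT}` });

async function ensureIndexWithMapping() {
  const exists = await esClient.indices.exists({ index: env.INDEX_NAME });
  if (!exists.body) { // v7-style client response: the boolean is in .body
    await esClient.indices.create({
      index: env.INDEX_NAME,
      body: {
        mappings: {
          properties: {
            response_code: { type: 'text', fielddata: true },
            response_text: { type: 'text', fielddata: true },
            status: { type: 'boolean' },
            ip: { type: 'ip' },
            login: { type: 'text', fielddata: true }
          }
        }
      }
    });
  }
}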

It may be that the elasticsearch plugin does not create or modify the index but simply writes to it, so I used the http plugin instead:
<match mapping.doc>
  @type http
  endpoint "#{ENV['SCHEME']}://#{ENV['HOST']}:#{ENV['PORT']}/#{ENV['INDEX_NAME']}"
  http_method put
  headers {"Content-Type":"application/json"}
  open_timeout 2
  <buffer>
    flush_interval "#{ENV['FLUSH_INTERVAL']}"
  </buffer>
</match>
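Note that fluent-plugin-elasticsearch can also apply mappings itself through index templates: its template_name and template_file parameters upload a template when the plugin starts, so documents indexed afterwards get the explicit types. A sketch under that assumption, where the file path is hypothetical and the JSON file would contain an index template wrapping the "mappings" block shown above:
<match mapping.doc>
  @type elasticsearch
  host "#{ENV['HOST']}"
  port "#{ENV['PORT']}"
  index_name "#{ENV['INDEX_NAME']}"
  # Hypothetical path; the file holds an index template JSON
  # containing the mappings from the question.
  template_name test_mapping_template
  template_file /fluentd/etc/test_mapping_template.json
</match>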

Related

Elastic search sort error - search_phase_execution_exception

I am facing an issue with sorting values in an Elasticsearch query. I am doing a simple search with sort but getting the following error. The query works without the sort parameter.
Elasticsearch client version: 7.6.1 (using this version because I am using OpenSearch).
search_phase_execution_exception: [illegal_argument_exception] Reason:
Text fields are not optimised for operations that require per-document
field data like aggregations and sorting, so these operations are
disabled by default. Please use a keyword field instead.
Alternatively, set fielddata=true on [subtype] in order to load field
data by uninverting the inverted index. Note that this can use
significant memory.
Code Sample:
const { Client } = require('@elastic/elasticsearch') // Version 7.6.1
var connectionString = 'https://admin:admin@localhost:9200'
const client = new Client({
  node: connectionString,
  ssl: {
    rejectUnauthorized: false
  }
})

client.info()
  .then(async response => {
    console.log('success', response.statusCode)
    var query = {
      "query": {
        "match": {
          "revhostname": {
            "query": "ten.tsacmoc.ac.1dsh.631-651-14-37-c"
          }
        }
      },
      "sort": [
        {
          "revhostname": { "order": "asc" },
          "subtype": { "order": "asc" },
          "value": { "order": "asc" }
        }
      ]
    };
    var response = await client.search({
      index: 'r7',
      body: query
    });
    console.log("Search results:", JSON.stringify(response));
  })
  .catch(error => {
    console.error('error', JSON.stringify(error))
  })
Mapping:
{
  "properties": {
    "revhostname": {
      "type": "keyword"
    },
    "value": {
      "type": "keyword"
    },
    "subtype": {
      "type": "keyword"
    },
    "timestamp": {
      "type": "long"
    },
    "ip": {
      "type": "ip"
    }
  }
}
I tried adding fielddata=true in the mapping, but the issue was not solved. Your help is much appreciated.
Thank you.
As you mentioned in a comment, your revhostname field is defined as a multi-field with both text and keyword types, and Elasticsearch does not allow sorting on a text field.
If your mapping is still the same as in your comment, you need to sort on the keyword sub-field, e.g. revhostname.keyword, which will resolve the issue.
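For reference, this is what such a text-plus-keyword multi-field looks like (it is also what Elasticsearch's default dynamic mapping generates for string values); sorting and aggregations have to target the keyword sub-field:
{
  "properties": {
    "revhostname": {
      "type": "text",
      "fields": {
        "keyword": {
          "type": "keyword",
          "ignore_above": 256
        }
      }
    }
  }
}
The corrected query then sorts on the .keyword sub-fields: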
const { Client } = require('@elastic/elasticsearch') // Version 7.6.1
var connectionString = 'https://admin:admin@localhost:9200'
const client = new Client({
  node: connectionString,
  ssl: {
    rejectUnauthorized: false
  }
})

client.info()
  .then(async response => {
    console.log('success', response.statusCode)
    var query = {
      "query": {
        "match": {
          "revhostname": {
            "query": "ten.tsacmoc.ac.1dsh.631-651-14-37-c"
          }
        }
      },
      "sort": [
        {
          "revhostname.keyword": { "order": "asc" },
          "subtype.keyword": { "order": "asc" },
          "value.keyword": { "order": "asc" }
        }
      ]
    };
    var response = await client.search({
      index: 'r7',
      body: query
    });
    console.log("Search results:", JSON.stringify(response));
  })
  .catch(error => {
    console.error('error', JSON.stringify(error))
  })

Add dynamic value when writing log message with log4js

I want to add a dynamic attribute to a pattern I am using with log4js.
I am using some custom pattern, something like this:
"%d{} %-6p[%thread] %c [%x{context}]: %m%n%r"
Context is the dynamic value that I want to set with some unique id generated for each user on the server side.
There is a way to add dynamic values when creating the log4js configuration, using the "tokens" and "context" attributes, but in that case the values are set at configuration time.
Is there a way to add some dynamic attribute that is set when the actual message is written to the log and not during the config phase?
Right now I am doing something like this:
log4js.configure(
  {
    appenders: {
      "file": {
        "type": "file",
        "filename": "service.log",
        "maxLogSize": 102400,
        "backups": 5,
        "category": "com.nextinsurance",
        "layout": {
          "type": "pattern",
          "pattern": "%d{} %-6p[%thread] %c [%x{context}]: %m%n%r",
          "tokens": {
            context: function(logEvent) {
              return getContextFromData(logEvent)
            }
          }
        }
      },
      "console": { "type": "console" }
    },
    categories: { "default": { "appenders": ["file", "console"], "level": "info" } }
  }
);
But I want to inject this value when writing to the log, something like:
logger.info(Message, {context: context_value})
You can use the logEvent.data property to fetch the context; it contains the array of arguments passed to the log call.
Here is the sample code:
var log4js = require("log4js");

log4js.configure({
  appenders: {
    out: {
      type: 'stdout',
      layout: {
        type: 'pattern',
        pattern: '[%d] [%p] [%c] [%x{context}] - %m%n',
        tokens: {
          context: function(logEvent) {
            let returnVal = logEvent.data[1] ? logEvent.data[1].context : undefined;
            if (returnVal) {
              logEvent.data.pop();
            } else {
              returnVal = 'null'; // set any default value if context is not passed.
            }
            return returnVal;
          }
        }
      }
    }
  },
  categories: {
    default: {
      appenders: ['out'],
      level: 'INFO'
    }
  }
});

log4js.level = 'info';
let logger = log4js.getLogger();
logger.info('Hello', { context: 'context_value' }); // prints [2019-09-13T16:50:48.818] [INFO] [default] [context_value] - Hello
logger.info('Hello'); // prints [2019-09-13T16:50:48.820] [INFO] [default] [null] - Hello
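If you want call sites to match the logger.info(Message, {context: context_value}) shape from the question exactly, a small hypothetical wrapper keeps the argument order consistent with what the token function expects:
// Hypothetical helper (not part of log4js): the context object must be
// the second argument, because the token reads logEvent.data[1].
function infoWithContext(message, context) {
  logger.info(message, { context: context });
}

infoWithContext('Hello', 'ctx-123'); // prints [...] [INFO] [default] [ctx-123] - Hello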

How to override the default loopback model API paths

How can we override the default LoopBack REST API model endpoints? For example, I would like to invoke a custom model method named list when the following GET API is invoked.
I am referring to the documentation https://loopback.io/doc/en/lb2/Exposing-models-over-REST.html
1. API endpoint from the LoopBack explorer: http://localhost:3000/api/Assets
2. Model method definition:
Asset.list = function(cb) {
  console.log("called");
}

Asset.remoteMethod('list', {
  http: {path: '/'},
  returns: {type: 'Object', root: true}
});
If you want to use a non-default path (one not used by the built-in methods), you should add a new remote method to the model's JSON config and define the method in the model's JS file:
"methods": {
"myCustomMethod": {
"accepts": [
{
"arg": "req",
"type": "object",
"http": {
"source": "req"
}
}
],
"returns": [
{
"type": "Array",
"root": true
}
],
"http": {
"verb": "GET",
"path": "/myCustomPath"
}
}
}
Candidate.myCustomMethod = (req) => { /* method code */ };
If you want to override a default LoopBack path (an autogenerated method), you should also disable the default method:
Candidate.disableRemoteMethodByName('find');
Now you can change "/myCustomPath" to "/" in the JSON config, and your remote method will be invoked instead of the default one, as in the sketch below.
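Putting both steps together, a minimal sketch of common/models/asset.js (the returned sample data is made up) that replaces the built-in GET /api/Assets with a custom list method:
module.exports = function (Asset) {
  // Disable the autogenerated GET /Assets handler...
  Asset.disableRemoteMethodByName('find');

  // ...and register a custom method on the root path instead.
  Asset.list = function (cb) {
    cb(null, [{ name: 'example asset' }]); // cb(error, result)
  };

  Asset.remoteMethod('list', {
    http: { verb: 'get', path: '/' },
    returns: { type: 'Array', root: true }
  });
};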
Your console.log("called") should appear in your terminal only, not as a response in your web browser; that may be why you aren't seeing anything at the moment.
If you want to see something in your web browser, you have to return a value through the callback, like:
module.exports = function (Asset) {
  Asset.list = function(cb) {
    console.log("called");
    cb(false, {'called': true}); // cb(error, returned value(s));
  }

  Asset.remoteMethod('list', {
    http: {verb: 'get'},
    returns: {type: 'Object', root: true}
  });
}
This file should be common/models/asset.js.
In your server/model-config.json, do not forget to reference your model:
...
"Asset": {
  "dataSource": "yourdatasource", // replace with your datasource name
  "public": true
}
...

Logstash: how can I add a field? (loglevel is not shown)

My code looks like this (copied from another project), but I don't know why the loglevel field is not found in the JSON. Here is the code in detail:
LoggerContext context = (LoggerContext) LoggerFactory.getILoggerFactory();
Logger logger = context.getLogger("ROOT");
if (enabled == null || enabled) {
    if (logger.getAppender(APPENDER_NAME) == null) {
        String destination = host + ":" + port;
        try {
            DestinationParser.parse(destination, AbstractLogstashTcpSocketAppender.DEFAULT_PORT);
        } catch (RuntimeException e) {
            throw new IllegalArgumentException("Invalid host and port : " + destination);
        }
        LogstashTcpSocketAppender appender = new LogstashTcpSocketAppender();
        appender.setContext(context);
        appender.addDestination(destination);
        appender.setName(APPENDER_NAME);
        LogstashEncoder encoder = new LogstashEncoder();
        encoder.setCustomFields("{\"application_name\":\"" + applicationName + "\"}");
        encoder.setContext(context);
        appender.setEncoder(encoder);
        appender.start();
        encoder.start();
        logger.addAppender(appender);
        logger.setLevel(Level.ALL);
    }
}
I start the ELK stack from https://github.com/Dreampie/docker-elk with docker-compose up -d --build, but the Logstash result does not contain the loglevel field. Logstash uses codec: "json":
{
  "_index": "logstash-2016.10.10",
  "_type": "logs",
  "_id": "AVetGEgU-dbBmx39fbyl",
  "_score": null,
  "_source": {
    "message": "{\"@timestamp\":\"2016-10-10T13:33:24.998+08:00\",\"@version\":1,\"message\":\"Retrieving delivery for Consumer: tags=[{amq.ctag-qURSKFA3CagYtd7y9EDAFQ=springCloudBus.anonymous.QTc6psI8RbOJm5oNFurqHA}], channel=Cached Rabbit Channel: AMQChannel(amqp://acfun@192.168.60.201:5672/,1), conn: Proxy@8440641 Shared Rabbit Connection: SimpleConnection@62e8f5bf [delegate=amqp://acfun@192.168.60.201:5672/], acknowledgeMode=AUTO local queue size=0\",\"logger_name\":\"org.springframework.amqp.rabbit.listener.BlockingQueueConsumer\",\"thread_name\":\"springCloudBus.anonymous.QTc6psI8RbOJm5oNFurqHA-1\",\"level\":\"DEBUG\",\"level_value\":10000,\"LOG_LEVEL_PATTERN\":\"%5p\",\"level\":\"DEBUG\",\"application_name\":\"user-api-provider\"}",
    "@version": "1",
    "@timestamp": "2016-10-10T05:37:11.819Z",
    "host": "192.168.60.228",
    "port": 52196
  },
  "fields": {
    "@timestamp": [
      1476077831819
    ]
  },
  "sort": [
    1476077831819
  ]
}
I got it: with the json codec, the loglevel field (and some other fields) are not extracted; the json_lines codec works fine. The Logback TCP appender emits one JSON document per line, so with the plain json codec the whole event stays as an escaped string inside the message field (as in the output above) instead of being parsed into fields:
codec => json_lines
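For reference, a minimal Logstash input section for this setup could look like the following (the port number is an assumption):
input {
  tcp {
    port => 5000
    codec => json_lines
  }
}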

How to log from Node.js with Express to ELK?

I have a Node.js server application with Express. I would like to log its activity into ElasticSearch and visualize the logs using Kibana.
What would be the right way to do that?
Should I write a log file of json lines and read it with Logstash?
I'd recommend log4js. It has a range of useful appenders, and logstash is one of them. It works over UDP.
Here is an example taken from the log4js site:
var log4js = require('log4js'); // the original log4js repo example uses require('../lib/log4js')
/*
Sample logstash config:
udp {
  codec => json
  port => 10001
  queue_size => 2
  workers => 2
  type => myAppType
}
*/
log4js.configure({
  "appenders": [
    {
      type: "console",
      category: "myLogger"
    },
    {
      "host": "127.0.0.1",
      "port": 10001,
      "type": "logstashUDP",
      "logType": "myAppType", // Optional, defaults to 'category'
      "fields": {             // Optional, will be added to the 'fields' object in logstash
        "field1": "value1",
        "field2": "value2"
      },
      "layout": {
        "type": "pattern",
        "pattern": "%m"
      },
      "category": "myLogger"
    }
  ]
});
var logger = log4js.getLogger("myLogger");
logger.info("Test log message %s", "arg1", "arg2");
