@1xtr/moleculer-elasticsearch-logger v1.0.5

Send logs to Elasticsearch directly
This is a fork of the native Datadog logger
Description
Easily send logs directly to Elasticsearch.
Uses the `@elastic/elasticsearch` client (`^8.4.0`).
Install
```bash
$ npm install @1xtr/moleculer-elasticsearch-logger --save
```

Usage

```js
const ElasticLogger = require('@1xtr/moleculer-elasticsearch-logger')
module.exports = {
logger: new ElasticLogger({
// put your options here
})
}
```

Default options
Note: the `timestamp` field contains a UNIX timestamp in milliseconds, but to create Data Views in Kibana you need to transform it to a `Date` type (`yyyy-MM-dd'T'HH:mm:ss.SSSXXX`) in a pipeline.
If the `index` option is not set, all logs are sent to `moleculer-${row.ts.yyyymmdd()}` indices, for example `moleculer-20220929`.
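Conversely, setting `index` should pin all logs to a single index. A minimal sketch, assuming a non-null `index` takes a plain index name (the name below is made up):

```js
const ElasticLogger = require('@1xtr/moleculer-elasticsearch-logger')

// Assumption: a non-null `index` sends every log to this one index instead of
// the daily moleculer-YYYYMMDD indices. The index name is hypothetical.
const logger = new ElasticLogger({
  clientOptions: { node: 'http://localhost:9200' },
  index: 'moleculer-logs',
})
```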
If you need to use Ingest Pipelines, you can set the `pipeline` option (see the Ingest Pipeline example below).
```js
const defaultOptions = {
clientOptions: {
node: 'http://localhost:9200',
tls: {
// ca: readFileSync('/ca.crt'),
rejectUnauthorized: false,
},
},
index: null,
pipeline: null,
source: process.env.MOL_NODE_NAME || 'moleculer',
hostname: hostname(),
objectPrinter: null,
interval: 5 * 1000,
excludeModules: []
}
```

Options example

```json
{
"clientOptions": {
"node": "http://es01:9200",
"auth": {
"username": "log-user",
"password": "very-StRoNg-password"
},
"tls": {
"rejectUnauthorized": false
}
},
"pipeline": "moleculer",
"excludeModules": [
"broker",
"registry",
"discovery",
"transporter",
"$node",
"transit",
"cacher"
]
}
```

Ingest Pipeline example
- create `@timestamp` with type `Date` from `_source.timestamp`
- save logs to index name `moleculer-yyyyMMdd`
- remove the `timestamp` field
- try to parse JSON from the `message` field and save the object to `parsedMsg`
- set the `requestID` field from `_source.parsedMsg.requestID`
- set the `subdomain` field from `_source.parsedMsg.subdomain`
- set the `action` field from `_source.parsedMsg.action`
- set the `title` field from `_source.parsedMsg.title`
- set the `caller` field from `_source.parsedMsg.caller`
- if `ctx.parsedMsg?.title == "Incoming webhook"`, add the tag `webhook`
- remove the parsed JSON `_source.parsedMsg`
- if `message` is empty, drop the document
```json
[
{
"date": {
"field": "_source.timestamp",
"formats": [
"UNIX_MS"
],
"target_field": "@timestamp"
}
},
{
"date_index_name": {
"field": "_source.timestamp",
"date_rounding": "d",
"index_name_prefix": "moleculer-",
"index_name_format": "yyyyMMdd",
"date_formats": [
"UNIX_MS"
]
}
},
{
"remove": {
"field": "timestamp",
"ignore_missing": true,
"ignore_failure": true
}
},
{
"json": {
"field": "_source.message",
"target_field": "parsedMsg",
"ignore_failure": true
}
},
{
"set": {
"field": "requestID",
"copy_from": "_source.parsedMsg.requestID",
"ignore_empty_value": true
}
},
{
"set": {
"field": "subdomain",
"copy_from": "_source.parsedMsg.subdomain",
"ignore_empty_value": true
}
},
{
"set": {
"field": "action",
"copy_from": "_source.parsedMsg.action",
"ignore_empty_value": true
}
},
{
"set": {
"field": "title",
"copy_from": "_source.parsedMsg.title",
"ignore_empty_value": true
}
},
{
"set": {
"field": "caller",
"copy_from": "_source.parsedMsg.caller",
"ignore_empty_value": true
}
},
{
"script": {
"source": "ctx['tags'].add(\"webhook\");",
"if": "ctx.parsedMsg?.title == \"Incoming webhook\";",
"ignore_failure": true,
"description": "Add tag webhook"
}
},
{
"remove": {
"field": "_source.parsedMsg",
"ignore_missing": true
}
},
{
"drop": {
"if": "ctx.message === ''"
}
}
]
```
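To register this pipeline in Elasticsearch, one option is the same `@elastic/elasticsearch` client the logger already uses. A minimal sketch, assuming Elasticsearch is reachable at `http://localhost:9200` and using the pipeline id `moleculer` from the options example above; only the first two processors are repeated here:

```js
const { Client } = require('@elastic/elasticsearch')

// Assumptions: Elasticsearch at http://localhost:9200, pipeline id "moleculer"
// (matching the `pipeline` option above). Only the first two processors from
// the example are included; paste the full array for the complete pipeline.
const client = new Client({ node: 'http://localhost:9200' })

async function createMoleculerPipeline() {
  await client.ingest.putPipeline({
    id: 'moleculer',
    description: 'Transform moleculer logs',
    processors: [
      {
        date: {
          field: '_source.timestamp',
          formats: ['UNIX_MS'],
          target_field: '@timestamp',
        },
      },
      {
        date_index_name: {
          field: '_source.timestamp',
          date_rounding: 'd',
          index_name_prefix: 'moleculer-',
          index_name_format: 'yyyyMMdd',
          date_formats: ['UNIX_MS'],
        },
      },
      // ...remaining processors from the example above
    ],
  })
}

createMoleculerPipeline().catch(console.error)
```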