Added Filebeat Configuration (#44)

Added Filebeat Configuration

Co-authored-by: Toni <matzeton@googlemail.com>
Author: Naix
Date: 2024-10-06 20:09:54 +11:00
Committed by: GitHub
parent 76e1ea0598
commit 3e2ce661f0
2 changed files with 33 additions and 0 deletions


@@ -92,3 +92,8 @@ Required by `tests/run_tests.sh`
Validate nDPId JSON messages against internal event semantics.
Required by `tests/run_tests.sh`
## yaml-filebeat
An example Filebeat configuration to parse nDPId JSON messages and send them
to Elasticsearch, allowing long-term storage and data visualization with Kibana
and various other tools that interact with Elasticsearch (no Logstash required).


@@ -0,0 +1,28 @@
filebeat.inputs:
- type: unix
  id: "NDPId-logs" # a unique ID for this input; change it to your preference
  max_message_size: 100MiB
  index: "index-name" # replace this with your desired index name in Elasticsearch
  enabled: true
  path: "/var/run/nDPId.sock" # point nDPId to this UNIX socket (collector)
processors:
  - script: # JavaScript that strips the leading 5-digit length prefix and the trailing newline from each message
      lang: javascript
      id: trim
      source: >
        function process(event) {
            event.Put("message", event.Get("message").trim().slice(5));
        }
  - decode_json_fields: # decode the JSON in "message" into structured fields
      fields: ["message"]
      process_array: true
      max_depth: 10
      target: ""
      overwrite_keys: true
      add_error_key: false
  - drop_fields: # delete the raw "message" field (the undecoded JSON); comment this out if you need the original message
      fields: ["message"]
  - rename:
      fields:
        - from: "source" # rename to avoid a mapping conflict in Elasticsearch
          to: "Source_Interface"