added Malcolm

This commit is contained in:
2021-08-06 10:35:01 +02:00
parent f043730066
commit 70f1922e80
751 changed files with 195277 additions and 0 deletions

View File

@@ -0,0 +1,3 @@
*
!.gitignore

View File

@@ -0,0 +1,40 @@
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.

# Filebeat configuration for shipping nginx access/error logs directly to
# Elasticsearch (separate from the main zeek-log filebeat instance).

#================================ Modules ======================================
filebeat.modules:
  - module: nginx
    # collect both the access and error logs; paths are overridable via env var
    access:
      enabled: true
      var.paths: ["${FILEBEAT_NGINX_LOG_PATH:/data/nginx}/access.log*"]
    error:
      enabled: true
      var.paths: ["${FILEBEAT_NGINX_LOG_PATH:/data/nginx}/error.log*"]

#================================ Outputs ======================================
#-------------------------- Elasticsearch output -------------------------------
output.elasticsearch:
  enabled: true
  hosts: ["elasticsearch:9200"]
  indices:
    # daily index, named per agent version, for events from the nginx module
    - index: "filebeat-%{[agent.version]}-nginx-%{+yyyy.MM.dd}"
      when.equals:
        event.module: "nginx"

# load the index template once but never clobber an existing one
setup.template.enabled: true
setup.template.overwrite: false
setup.template.settings:
  # single-node deployment: one shard, no replicas
  index.number_of_shards: 1
  index.number_of_replicas: 0

#============================== Dashboards =====================================
setup.dashboards.enabled: true
setup.dashboards.directory: "/usr/share/filebeat/kibana"

#============================== Kibana =====================================
setup.kibana:
  host: "kibana:5601"
  path: "/kibana"

#================================ Logging ======================================
logging.metrics.enabled: false

View File

@@ -0,0 +1,60 @@
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.

# Filebeat configuration for tailing zeek logs out of the current/ directory
# and shipping them to logstash.

logging.metrics.enabled: false

#================================ Inputs =======================================
filebeat.inputs:
  # main input: completed zeek logs symlinked into current/
  - type: log
    paths:
      - ${FILEBEAT_LOG_PATH:/data/zeek/current}/*.log
    # see comment below for signatures(_carved).log
    exclude_files: ['signatures\(_carved.*\)\.log$']
    symlinks: true
    fields_under_root: true
    # tags: ["foo"]
    fields:
      type: "session"
    compression_level: 0
    # zeek log header lines start with '#'; don't ship them as events
    exclude_lines: ['^\s*#']
    # aggressive close/clean settings: these files are written once and then
    # only read, so release harvesters quickly (all overridable via env vars)
    scan_frequency: ${FILEBEAT_SCAN_FREQUENCY:10s}
    clean_inactive: ${FILEBEAT_CLEAN_INACTIVE:45m}
    ignore_older: ${FILEBEAT_IGNORE_OLDER:30m}
    close_inactive: ${FILEBEAT_CLOSE_INACTIVE:30s}
    close_renamed: ${FILEBEAT_CLOSE_RENAMED:true}
    close_removed: ${FILEBEAT_CLOSE_REMOVED:true}
    close_eof: ${FILEBEAT_CLOSE_EOF:true}
    clean_removed: ${FILEBEAT_CLEAN_REMOVED:true}

  # signatures(_carved).log is different, as it comes from file carving and is
  # "live", unlike the other *.log files which are processed after the fact.
  # The most important difference is close_eof, as we don't want to close
  # signatures(_carved).log when we get to the end of the file since it will
  # likely be written to again shortly. For these reasons we exclude it in the
  # main filebeat log input (see above) and handle it with custom settings here.
  - type: log
    paths:
      - ${FILEBEAT_LOG_PATH:/data/zeek/current}/signatures(_carved*).log
    symlinks: true
    fields_under_root: true
    # tags: ["foo"]
    fields:
      type: "session"
    compression_level: 0
    exclude_lines: ['^\s*#']
    scan_frequency: ${FILEBEAT_SCAN_FREQUENCY:10s}
    # much longer windows than the main input, and close_eof disabled, because
    # this file stays open for writing
    clean_inactive: 200m
    ignore_older: 180m
    close_inactive: 120m
    close_renamed: false
    close_removed: true
    close_eof: false
    clean_removed: true

#================================ Outputs ======================================
output.logstash:
  hosts: ["logstash:5044"]
  ssl.enabled: ${BEATS_SSL:false}
  ssl.certificate_authorities: ["/certs/ca.crt"]
  ssl.certificate: "/certs/client.crt"
  ssl.key: "/certs/client.key"
  ssl.supported_protocols: "TLSv1.2"
  # NOTE(review): "none" disables server certificate validation — presumably
  # acceptable for an internal docker-network connection, but confirm
  ssl.verification_mode: "none"

View File

@@ -0,0 +1,139 @@
#!/usr/bin/env python3
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.
import os
from os.path import splitext
from tempfile import gettempdir
import errno
import time
import fcntl
import fnmatch
import magic
import json
import pprint
import re
from subprocess import Popen, PIPE
# lock file used to ensure only one instance of this script runs at a time
lockFilename = os.path.join(gettempdir(), '{}.lock'.format(os.path.basename(__file__)))
# base zeek log directory; joining with '' guarantees a trailing slash
broDir = os.path.join(os.getenv('FILEBEAT_ZEEK_DIR', "/data/zeek/"), '')
# age thresholds in seconds for pruning plain-text logs and archives; <= 0 disables that category
cleanLogSeconds = int(os.getenv('FILEBEAT_LOG_CLEANUP_MINUTES', "30")) * 60
cleanZipSeconds = int(os.getenv('FILEBEAT_ZIP_CLEANUP_MINUTES', "120")) * 60
# filebeat registry; files still referenced there are never pruned
fbRegFilename = os.getenv('FILEBEAT_REGISTRY_FILE', "/usr/share/filebeat/data/registry/filebeat/data.json")
# live (symlinked) logs and already-processed archives, respectively
currentDir = broDir + "current/"
processedDir = broDir + "processed/"
import os, errno
def silentRemove(filename):
    """Best-effort removal of a file, symlink, or (empty) directory.

    Any OSError — missing path, permission problem, non-empty directory —
    is deliberately swallowed so callers never need to guard cleanup calls.
    """
    try:
        removable_as_file = os.path.isfile(filename) or os.path.islink(filename)
        if removable_as_file:
            os.remove(filename)
        elif os.path.isdir(filename):
            os.rmdir(filename)
    except OSError:
        pass
def pruneFiles():
    """Prune aged-out zeek log files and directories.

    Removes, in order:
      1. files under processed/ that are old enough, no longer tracked in the
         filebeat registry, and not open in any process (checked via fuser)
      2. broken symlinks in current/
      3. empty, aged-out subdirectories of processed/
    """
    if (cleanLogSeconds <= 0) and (cleanZipSeconds <= 0):
        # disabled, don't do anything
        return

    nowTime = time.time()
    logMimeType = "text/plain"
    archiveMimeTypeRegex = re.compile(r"(application/gzip|application/x-gzip|application/x-7z-compressed|application/x-bzip2|application/x-cpio|application/x-lzip|application/x-lzma|application/x-rar-compressed|application/x-tar|application/x-xz|application/zip)")

    # look for regular files in the processed/ directory
    foundFiles = [(os.path.join(root, filename)) for root, dirnames, filenames in os.walk(processedDir) for filename in filenames]

    # look up the filebeat registry file and try to read it
    # NOTE(review): assumes the registry is a JSON array of entries, each with a
    # FileStateOS dict holding device/inode — confirm for the filebeat version in use
    fbReg = None
    if os.path.isfile(fbRegFilename):
        with open(fbRegFilename) as f:
            fbReg = json.load(f)

    # see if the files we found are in use and old enough to be pruned
    for file in foundFiles:

        # first check to see if it's in the filebeat registry
        if fbReg is not None:
            fileStatInfo = os.stat(file)
            if (fileStatInfo):
                # match registry entries by device/inode rather than by name,
                # since files may have been moved or renamed
                fileFound = any(((entry['FileStateOS']) and
                                 (entry['FileStateOS']['device'] == fileStatInfo.st_dev) and
                                 (entry['FileStateOS']['inode'] == fileStatInfo.st_ino)) for entry in fbReg)
                if fileFound:
                    # found a file in the filebeat registry, so leave it alone!
                    # we only want to delete files that filebeat has forgotten
                    # print("{} is found in registry!".format(file))
                    continue
                # else:
                #     print("{} is NOT found in registry!".format(file))

        # now see if the file is in use by any other process in the system
        fuserProcess = Popen(["fuser", "-s", file], stdout=PIPE)
        fuserProcess.communicate()
        fuserExitCode = fuserProcess.wait()
        if (fuserExitCode != 0):

            # the file is not in use, let's check its mtime/ctime
            logTime = max(os.path.getctime(file), os.path.getmtime(file))
            lastUseTime = nowTime - logTime

            # get the file type and pick the matching age threshold
            fileType = magic.from_file(file, mime=True)
            if (cleanLogSeconds > 0) and (fileType == logMimeType):
                cleanSeconds = cleanLogSeconds
            elif (cleanZipSeconds > 0) and archiveMimeTypeRegex.match(fileType) is not None:
                cleanSeconds = cleanZipSeconds
            else:
                # not a file we're going to be messing with
                cleanSeconds = 0

            if (cleanSeconds > 0) and (lastUseTime >= cleanSeconds):
                # this is a closed file that is old, so delete it
                print('removing old file "{}" ({}, used {} seconds ago)'.format(file, fileType, lastUseTime))
                silentRemove(file)

    # clean up any broken symlinks in the current/ directory
    for current in os.listdir(currentDir):
        currentFileSpec = os.path.join(currentDir, current)
        if os.path.islink(currentFileSpec) and not os.path.exists(currentFileSpec):
            print('removing dead symlink "{}"'.format(currentFileSpec))
            silentRemove(currentFileSpec)

    # clean up any old and empty directories in processed/ directory;
    # use the smaller enabled threshold as the directory age cutoff
    cleanDirSeconds = min(i for i in (cleanLogSeconds, cleanZipSeconds) if i > 0)
    candidateDirs = []
    for root, dirs, files in os.walk(processedDir, topdown=False):
        if (root and dirs):
            candidateDirs += [os.path.join(root, tmpDir) for tmpDir in dirs]
    # sort deepest paths first so children are attempted before their parents
    candidateDirs = list(set(candidateDirs))
    candidateDirs.sort(reverse=True)
    candidateDirs.sort(key=len, reverse=True)
    candidateDirsAndTimes = zip(candidateDirs, [os.path.getmtime(dirToRm) for dirToRm in candidateDirs])
    for (dirToRm, dirTime) in candidateDirsAndTimes:
        dirAge = (nowTime - dirTime)
        if (dirAge >= cleanDirSeconds):
            try:
                # os.rmdir only succeeds on empty directories, which is what we want
                os.rmdir(dirToRm)
                print('removed empty directory "{}" (used {} seconds ago)'.format(dirToRm, dirAge))
            except OSError:
                pass
def main():
    """Entry point: acquire a single-instance advisory lock, then prune.

    The flock is non-blocking: if another instance already holds the lock,
    we exit immediately — and, crucially, leave that instance's lock file
    alone.
    """
    with open(lockFilename, 'w') as lock_file:
        try:
            fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            # another instance holds the lock; do NOT delete its lock file.
            # (Previously the remove lived in a finally that also ran on this
            # path, deleting the active instance's lock file and letting a
            # third instance start concurrently.)
            return
        try:
            pruneFiles()
        finally:
            # we own the lock, so it is safe to clean up the lock file
            os.remove(lockFilename)

View File

@@ -0,0 +1,12 @@
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.
# in_array ARRAY_NAME VALUE
# Succeeds (returns 0) when VALUE is an element of the named array, fails
# (returns 1) otherwise. Uses indirect expansion, so pass the array NAME
# (not its contents) as the first argument.
function in_array() {
  local haystack="${1}[@]"
  local needle="${2}"
  local element
  for element in "${!haystack}"; do
    [[ "${element}" == "${needle}" ]] && return 0
  done
  return 1
}

View File

@@ -0,0 +1,92 @@
#!/bin/bash

# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.

# for files (sort -V (natural)) under /data/zeek that:
#   - are not in processed/ or current/ or upload/ or extract_files/ (-prune)
#   - are archive files
#   - are not in use (fuser -s)
# 1. move file to processed/ (preserving original subdirectory hierarchy, if any)
# 2. calculate tags based on splitting the file path and filename
#    (splitting on [, -/_])

# number of archives handled in parallel by xargs below
FILEBEAT_PREPARE_PROCESS_COUNT=1

# ensure only one instance of this script can run at a time
# (mkdir of the lock directory is atomic, so it doubles as a mutex)
LOCKDIR="/tmp/zeek-beats-process-folder"

# exported so they are visible inside the bash -c subshells spawned by xargs
export SCRIPT_DIR="$( cd -P "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
export ZEEK_LOG_FIELD_BITMAP_SCRIPT="$SCRIPT_DIR/zeek-log-field-bitmap.py"
export ZEEK_LOG_AUTO_TAG=${AUTO_TAG:-"true"}
ZEEK_LOGS_DIR=${FILEBEAT_ZEEK_DIR:-/data/zeek/}

# remove the lock directory on exit
function cleanup {
  if ! rmdir $LOCKDIR; then
    echo "Failed to remove lock directory '$LOCKDIR'"
    exit 1
  fi
}

if mkdir $LOCKDIR; then
  # ensure that if we "grabbed a lock", we release it (works for clean exit, SIGTERM, and SIGINT/Ctrl-C)
  trap "cleanup" EXIT

  # get new zeek logs ready for processing
  cd "$ZEEK_LOGS_DIR"
  find . -path ./processed -prune -o -path ./current -prune -o -path ./upload -prune -o -path ./extract_files -prune -o -type f -exec file --mime-type "{}" \; | grep -P "(application/gzip|application/x-gzip|application/x-7z-compressed|application/x-bzip2|application/x-cpio|application/x-lzip|application/x-lzma|application/x-rar-compressed|application/x-tar|application/x-xz|application/zip)" | awk -F: '{print $1}' | sort -V | \
    xargs -n 1 -P $FILEBEAT_PREPARE_PROCESS_COUNT -I '{}' bash -c '
      # only handle archives that no other process currently has open
      fuser -s "{}" 2>/dev/null
      if [[ $? -ne 0 ]]
      then
        . $SCRIPT_DIR/filebeat-process-zeek-folder-functions.sh

        PROCESS_TIME=$(date +%s%N)
        SOURCEDIR="$(dirname "{}")"
        DESTDIR="./processed/$SOURCEDIR"
        DESTNAME="$DESTDIR/$(basename "{}")"
        DESTDIR_EXTRACTED="${DESTNAME}_${PROCESS_TIME}"
        LINKDIR="./current"

        # derive tags from the path/filename components after stripping the extension
        TAGS=()
        if [[ "$ZEEK_LOG_AUTO_TAG" = "true" ]]; then
          IFS=",-/_." read -r -a SOURCESPLIT <<< $(echo "{}" | sed "s/\.[^.]*$//")
          echo "\"{}\" -> \"${DESTNAME}\""
          for index in "${!SOURCESPLIT[@]}"
          do
            TAG_CANDIDATE="${SOURCESPLIT[index]}"
            # skip duplicates as well as numeric/date-like pieces and internal markers
            if ! in_array TAGS "$TAG_CANDIDATE"; then
              if [[ -n $TAG_CANDIDATE && ! $TAG_CANDIDATE =~ ^[0-9-]+$ && $TAG_CANDIDATE != "tar" && $TAG_CANDIDATE != "AUTOZEEK" && ! $TAG_CANDIDATE =~ ^AUTOCARVE ]]; then
                TAGS+=("${TAG_CANDIDATE}")
              fi
            fi
          done
        fi

        # move the archive out of the watch tree and unpack it
        mkdir -p "$DESTDIR"
        mkdir -p "$DESTDIR_EXTRACTED"
        mv -v "{}" "$DESTNAME"
        python3 -m pyunpack.cli "$DESTNAME" "$DESTDIR_EXTRACTED"

        # symlink each extracted .log into current/ with the tags
        # (and the field bitmap when available) encoded in the link name
        find "$DESTDIR_EXTRACTED" -type f -name "*.log" | while read LOGFILE
        do
          PROCESS_TIME=$(date +%s%N)
          TAGS_JOINED=$(printf "%s," "${TAGS[@]}")${PROCESS_TIME}
          FIELDS_BITMAP="$($ZEEK_LOG_FIELD_BITMAP_SCRIPT "$LOGFILE" | head -n 1)"
          LINKNAME_BASE="$(basename "$LOGFILE" .log)"
          if [[ -n $FIELDS_BITMAP ]]; then
            LINKNAME="${LINKNAME_BASE}(${TAGS_JOINED},${FIELDS_BITMAP}).log"
          else
            LINKNAME="${LINKNAME_BASE}(${TAGS_JOINED}).log"
          fi
          # refresh mtime so filebeat treats the log as new before linking it live
          touch "$LOGFILE"
          ln -sfr "$LOGFILE" "$LINKDIR/$LINKNAME"
        done
      fi
    '
fi

View File

@@ -0,0 +1,22 @@
#!/bin/bash

# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.

# Watch the zeek upload directory; when an uploaded file is closed for
# writing, either hand it off for processing (if it is an archive) or
# delete it (anything else).

PROCESS_DIR=${FILEBEAT_ZEEK_DIR:-/data/zeek/}
UPLOAD_DIR="${PROCESS_DIR}/upload"
mkdir -p "$UPLOAD_DIR"

# as new zeek log archives are closed for writing in /data/zeek/upload, move them to /data/zeek for processing
inotifywait -m -e close_write --format '%w%f' "${UPLOAD_DIR}" | while read NEWFILE
do
  FILEMIME=$(file -b --mime-type "$NEWFILE")
  if ( echo "$FILEMIME" | grep --quiet -P "(application/gzip|application/x-gzip|application/x-7z-compressed|application/x-bzip2|application/x-cpio|application/x-lzip|application/x-lzma|application/x-rar-compressed|application/x-tar|application/x-xz|application/zip)" ); then
    # looks like this is a compressed file, we're assuming it's a zeek log archive to be processed by filebeat
    sleep 0.1 && chown ${PUID:-${DEFAULT_UID}}:${PGID:-${DEFAULT_GID}} "$NEWFILE" && (>&2 mv -v "$NEWFILE" "$PROCESS_DIR/")
  else
    # unhandled file type uploaded, delete it
    # BUGFIX: chown was previously missing its "$NEWFILE" argument, so it
    # always failed and the short-circuited rm of the unhandled file never ran
    sleep 0.1 && chown ${PUID:-${DEFAULT_UID}}:${PGID:-${DEFAULT_GID}} "$NEWFILE" && (>&2 rm "$NEWFILE") && echo "Removed \"$NEWFILE\", unhandled file type \"$FILEMIME\""
  fi
done

View File

@@ -0,0 +1,152 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.
###################################################################################################
# parse the fields names from the header of of the log file and compare them to the
# known list of total fields. if this zeek log has is a subset of the known fields,
# create a bitmap of the included fields to be included as a special tag
# which can help the logstash parser know on a line-by-line basis which fields are included.
# when logstash-filter-dissect gets this implemented, we may not have to do this:
# - https://github.com/logstash-plugins/logstash-filter-dissect/issues/56
# - https://github.com/logstash-plugins/logstash-filter-dissect/issues/62
#
# arguments: accepts one argument, the name of a zeek log file
# output: returns a string suitable for use as a tag indicating the field bitset., eg., ZEEKFLDx00x01FFFFFF
#
# ZEEKFLDx00x01FFFFFF
# | └ bitmap of included fields within field list
# └ index into zeekLogFields list indicating (to support legacy field configurations, see below)
#
# example:
# $ ./zeek-log-field-bitmap.py /path/to/conn.log
# ZEEKFLDx00x01FFFFFF
#
# there are two cases we're trying to cover here by indicating the field types:
# 1. certain fields can be turned on/off in config (for example, enabling/disabling MACs or VLANs for conn.log)
# 2. a Zeek version upgrade changed the field list (see notes about DHCP.log in
# https://docs.zeek.org/en/latest/install/release-notes.html#bro-2-6)
#
# The first case is pretty simple, because in that case the fields in the zeek log will be some subset of
# the list of all known fields for that type.
#
# The second case is more complicated because the field list could be completely different. Because of this case
# each of the entries in zeekLogFields is itself a list, with older configurations occuring earlier in the list
#
# $ zeek-log-field-bitmap.py ./bro2.5/dhcp.log
# ZEEKFLDx00x000003FF
#
# $ zeek-log-field-bitmap.py ./bro2.6/dhcp.log
# ZEEKFLDx01x00007FFF
#
import sys
import os
import json
from collections import defaultdict
from ordered_set import OrderedSet
# lists of all known fields for each type of zeek log we're concerned with mapping (ordered as in the .log file header)
# are stored in zeek-log-fields.json
# canonical per-log-type field lists live in a JSON file alongside this script
FIELDS_JSON_FILE = os.path.join(os.path.dirname(os.path.realpath(__file__)), "zeek-log-fields.json")
ZEEK_LOG_DELIMITER = '\t' # zeek log file field delimiter
ZEEK_LOG_HEADER_LOGTYPE = 'path' # header value for zeek log type (conn, weird, etc.)
ZEEK_LOG_HEADER_FIELDS = 'fields' # header value for zeek log fields list
# file prefix for bitmap to stdout, eg., ZEEKFLDx00x01FFFFFF
ZEEK_LOG_BITMAP_PREFIX = 'ZEEKFLD'
###################################################################################################
# print to stderr
def eprint(*args, **kwargs):
    """print() wrapper that writes to stderr (stdout is reserved for the bitmap tag)."""
    print(*args, file=sys.stderr, **kwargs)
###################################################################################################
# Set the index'th bit of v to 1 if x is truthy, else to 0, and return the new value
def set_bit(v, index, x):
    """Return integer v with bit number `index` forced to 1 if x is truthy, else 0."""
    mask = 1 << index
    return (v | mask) if x else (v & ~mask)
###################################################################################################
# main
def main():
    """Read a zeek log's header, map its field list against the known field
    lists for that log type, and print a ZEEKFLDxNNxNNNNNNNN tag to stdout.

    Returns an os.EX_* exit code: EX_OK on success, EX_USAGE for bad
    arguments, EX_DATAERR for a missing/invalid fields JSON file or an
    unmatched log type.
    """
    errCode = os.EX_DATAERR
    dataError = False
    zeekLogFields = defaultdict(list)

    # load from json the canonical list of known zeek log fields we're concerned with mapping.
    # BUGFIX: previously json.load(open(...)) leaked the file handle and an
    # absent file raised an uncaught exception, even though the error message
    # below already claimed to cover "not found"
    try:
        with open(FIELDS_JSON_FILE, 'r') as fieldsFile:
            zeekLogFieldsTmp = json.load(fieldsFile)
    except (OSError, ValueError):
        zeekLogFieldsTmp = None
    if isinstance(zeekLogFieldsTmp, dict):
        for logType, listOfFieldLists in zeekLogFieldsTmp.items():
            if isinstance(logType, str) and isinstance(listOfFieldLists, list):
                # one OrderedSet per historical field configuration (older first)
                zeekLogFields[str(logType)] = [OrderedSet(fieldList) for fieldList in listOfFieldLists]
            else:
                dataError = True
                break
    else:
        dataError = True

    if dataError:
        # something is wrong with the json file
        eprint("Error loading {} (not found or incorrectly formatted)".format(FIELDS_JSON_FILE))
    else:
        if (len(sys.argv) == 2) and os.path.isfile(sys.argv[1]):
            fieldsBitmap = 0

            # loop over header lines in the zeek log file (beginning with '#') and extract
            # the header values into a dictionary containing, among other things:
            #   - the "path" which is the zeek log type (eg., conn, weird, etc.)
            #   - the "fields" list of field names
            headers = {}
            with open(sys.argv[1], "r") as zeekLogFile:
                for line in zeekLogFile:
                    if line.startswith('#'):
                        values = line.strip().split(ZEEK_LOG_DELIMITER)
                        key = values.pop(0)[1:]
                        if (len(values) == 1):
                            headers[key] = values[0]
                        else:
                            headers[key] = values
                    else:
                        # headers are only at the top; stop at the first data line
                        break

            if ((ZEEK_LOG_HEADER_LOGTYPE in headers) and            # the "path" header exists
                (ZEEK_LOG_HEADER_FIELDS in headers) and             # the "fields" header exists
                (headers[ZEEK_LOG_HEADER_LOGTYPE] in zeekLogFields)): # this zeek log type is one we map

                # the set of field names in *this* log file
                logFieldNames = OrderedSet(headers[ZEEK_LOG_HEADER_FIELDS])

                # try newest field configurations first (reversed enumeration)
                for versionIdx, allFieldNames in reversed(list(enumerate(zeekLogFields[headers[ZEEK_LOG_HEADER_LOGTYPE]]))):
                    # are this logfile's fields a subset of the complete list?
                    if logFieldNames.issubset(allFieldNames):
                        # determine which fields in the complete list are included in this log file
                        for i, fName in enumerate(allFieldNames):
                            fieldsBitmap = set_bit(fieldsBitmap, i, fName in logFieldNames)
                        print('{0}x{1:02X}x{2:08X}'.format(ZEEK_LOG_BITMAP_PREFIX, versionIdx, fieldsBitmap))
                        errCode = os.EX_OK

        else:
            # invalid command-line arguments
            eprint("{} <Zeek log file>".format(sys.argv[0]))
            errCode = os.EX_USAGE

    return errCode
if __name__ == '__main__':
sys.exit(main())

View File

@@ -0,0 +1,273 @@
{
"conn": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"proto",
"service",
"duration",
"orig_bytes",
"resp_bytes",
"conn_state",
"local_orig",
"local_resp",
"missed_bytes",
"history",
"orig_pkts",
"orig_ip_bytes",
"resp_pkts",
"resp_ip_bytes",
"tunnel_parents",
"vlan",
"inner_vlan",
"orig_l2_addr",
"resp_l2_addr",
"community_id"
]
],
"dhcp": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"mac",
"assigned_ip",
"lease_time",
"trans_id"
],
[
"ts",
"uids",
"client_addr",
"server_addr",
"mac",
"host_name",
"client_fqdn",
"domain",
"requested_addr",
"assigned_addr",
"lease_time",
"client_message",
"server_message",
"msg_types",
"duration",
"client_software",
"server_software"
]
],
"files": [
[
"ts",
"fuid",
"tx_hosts",
"rx_hosts",
"conn_uids",
"source",
"depth",
"analyzers",
"mime_type",
"filename",
"duration",
"local_orig",
"is_orig",
"seen_bytes",
"total_bytes",
"missing_bytes",
"overflow_bytes",
"timedout",
"parent_fuid",
"md5",
"sha1",
"sha256",
"extracted",
"extracted_cutoff",
"extracted_size"
]
],
"http": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"trans_depth",
"method",
"host",
"uri",
"referrer",
"version",
"user_agent",
"origin",
"request_body_len",
"response_body_len",
"status_code",
"status_msg",
"info_code",
"info_msg",
"tags",
"username",
"password",
"proxied",
"orig_fuids",
"orig_filenames",
"orig_mime_types",
"resp_fuids",
"resp_filenames",
"resp_mime_types",
"post_username",
"post_password_plain",
"post_password_md5",
"post_password_sha1",
"post_password_sha256"
]
],
"ntlm": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"username",
"hostname",
"domainname",
"success",
"status"
],
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"username",
"hostname",
"domainname",
"server_nb_computer_name",
"server_dns_computer_name",
"server_tree_name",
"success"
]
],
"rdp": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"cookie",
"result",
"security_protocol",
"client_channels",
"keyboard_layout",
"client_build",
"client_name",
"client_dig_product_id",
"desktop_width",
"desktop_height",
"requested_color_depth",
"cert_type",
"cert_count",
"cert_permanent",
"encryption_level",
"encryption_method"
]
],
"smb_files": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"fuid",
"action",
"path",
"name",
"size",
"prev_name",
"times.modified",
"times.accessed",
"times.created",
"times.changed",
"data_offset_req",
"data_len_req",
"data_len_rsp"
]
],
"ssh": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"version",
"auth_success",
"auth_attempts",
"direction",
"client",
"server",
"cipher_alg",
"mac_alg",
"compression_alg",
"kex_alg",
"host_key_alg",
"host_key",
"remote_location.country_code",
"remote_location.region",
"remote_location.city",
"remote_location.latitude",
"remote_location.longitude",
"hasshVersion",
"hassh",
"hasshServer",
"cshka",
"hasshAlgorithms",
"sshka",
"hasshServerAlgorithms"
]
],
"ssl": [
[
"ts",
"uid",
"id.orig_h",
"id.orig_p",
"id.resp_h",
"id.resp_p",
"version",
"cipher",
"curve",
"server_name",
"resumed",
"last_alert",
"next_protocol",
"established",
"cert_chain_fuids",
"client_cert_chain_fuids",
"subject",
"issuer",
"client_subject",
"client_issuer",
"validation_status",
"ja3",
"ja3s"
]
]
}

View File

@@ -0,0 +1,69 @@
; Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.

; supervisord configuration for the filebeat container: runs the two filebeat
; instances, the upload-folder watcher, and supercronic for scheduled jobs.

[unix_http_server]
file=/tmp/supervisor.sock ; (the path to the socket file)
chmod=0700

[supervisord]
nodaemon=true
logfile=/dev/null
logfile_maxbytes=0
pidfile=/tmp/supervisord.pid

[rpcinterface:supervisor]
supervisor.rpcinterface_factory=supervisor.rpcinterface:make_main_rpcinterface

[supervisorctl]
serverurl=unix:///tmp/supervisor.sock

; main filebeat instance (default config under /usr/share/filebeat)
[program:filebeat]
command=/usr/local/bin/docker-entrypoint -e --strict.perms=false
user=%(ENV_PUSER)s
startsecs=0
startretries=0
stopasgroup=true
killasgroup=true
directory=/usr/share/filebeat
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; second filebeat instance for nginx logs; waits for elasticsearch via
; elastic_search_status.sh, retries essentially forever, and only autostarts
; when NGINX_LOG_ACCESS_AND_ERRORS is true
[program:filebeat-nginx]
command=bash -c "/data/elastic_search_status.sh && /usr/local/bin/docker-entrypoint -e --strict.perms=false \
  --path.home /usr/share/filebeat-nginx \
  --path.config /usr/share/filebeat-nginx \
  --path.data /usr/share/filebeat-nginx/data \
  -c /usr/share/filebeat-nginx/filebeat-nginx.yml \
  --modules nginx"
user=%(ENV_PUSER)s
autostart=%(ENV_NGINX_LOG_ACCESS_AND_ERRORS)s
startsecs=30
startretries=2000000000
stopasgroup=true
killasgroup=true
directory=/usr/share/filebeat-nginx
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; watches the zeek upload directory and moves finished uploads for processing
; (runs as root so it can chown uploaded files)
[program:watch-upload]
command=/bin/bash -c "sleep 30 && /data/filebeat-watch-zeeklogs-uploads-folder.sh"
user=root
startsecs=35
startretries=1
stopasgroup=true
killasgroup=true
directory=/data
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true

; runs the container's crontab via supercronic
[program:cron]
autorestart=true
command=/usr/local/bin/supercronic -json "%(ENV_SUPERCRONIC_CRONTAB)s"
user=%(ENV_PUSER)s
stopasgroup=true
killasgroup=true
stdout_logfile=/dev/fd/1
stdout_logfile_maxbytes=0
redirect_stderr=true