added Malcolm

2021-08-06 10:35:01 +02:00
parent f043730066
commit 70f1922e80
751 changed files with 195277 additions and 0 deletions


@@ -0,0 +1,149 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import json
import re
import os
import sys
import time
TEMPLATE_POLICY_ID_DEFAULT = 'session_index_policy'
INDEX_PATTERN_DEFAULT = 'sessions2-*'
TEMPLATE_SCHEMA_VERSION_DEFAULT = 1
POLICY_STATE_HOT = 'hot'
POLICY_STATE_SNAPSHOT = 'recent'
POLICY_STATE_COLD = 'cold'
POLICY_STATE_CLOSED = 'closed'
POLICY_STATE_DELETE = 'delete'
POLICY_STATE_HOT_REPLICAS = 0
POLICY_SNAPSHOT_NAME = 'session_snapshot'
###################################################################################################
debug = False
scriptName = os.path.basename(__file__)
scriptPath = os.path.dirname(os.path.realpath(__file__))
origPath = os.getcwd()
###################################################################################################
# print to stderr
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
###################################################################################################
# convenient boolean argument parsing
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
###################################################################################################
# main
def main():
global debug
parser = argparse.ArgumentParser(description=scriptName, add_help=False, usage='{} <arguments>'.format(scriptName))
parser.add_argument('-v', '--verbose', dest='debug', type=str2bool, nargs='?', const=True, default=False, help="Verbose output")
parser.add_argument('--policy', dest='policyId', metavar='<str>', type=str, default=TEMPLATE_POLICY_ID_DEFAULT, help='Index management policy ID')
parser.add_argument('--index-pattern', dest='indexPattern', metavar='<str>', type=str, default=os.getenv('ARKIME_INDEX_PATTERN', INDEX_PATTERN_DEFAULT), help='Index management policy index pattern (comma-separated)')
parser.add_argument('--priority', dest='templatePriority', metavar='<int>', type=int, default=100, help='Template priority')
parser.add_argument('--version', dest='schemaVersion', metavar='<int>', type=int, default=TEMPLATE_SCHEMA_VERSION_DEFAULT, help='Index management policy template schema version')
parser.add_argument('--replicas', dest='hotReplicaCount', metavar='<int>', type=int, default=POLICY_STATE_HOT_REPLICAS, help='Replica count for hot state')
parser.add_argument('--snapshot', dest='snapshotAge', metavar='<str>', type=str, default='1d', help='Snapshot index age (e.g., 1d); 0 to disable')
parser.add_argument('--snapshot-repo', dest='snapshotRepo', metavar='<str>', type=str, default=os.getenv('ISM_SNAPSHOT_REPO', 'logs'), help='Snapshot repository')
parser.add_argument('--snapshot-name', dest='snapshotName', metavar='<str>', type=str, default=POLICY_SNAPSHOT_NAME, help='Snapshot name')
parser.add_argument('--cold', dest='coldAge', metavar='<str>', type=str, default='30d', help='Cold state index age (e.g., 30d); 0 to disable')
parser.add_argument('--close', dest='closeAge', metavar='<str>', type=str, default='60d', help='Close state index age (e.g., 60d); 0 to disable')
parser.add_argument('--delete', dest='deleteAge', metavar='<str>', type=str, default='365d', help='Delete state index age (e.g., 365d); 0 to disable')
try:
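# route argparse errors through parser.exit so a parse failure raises SystemExit and the full help is printed below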
parser.error = parser.exit
args = parser.parse_args()
except SystemExit:
parser.print_help()
exit(2)
debug = args.debug
if debug:
eprint(os.path.join(scriptPath, scriptName))
eprint("Arguments: {}".format(sys.argv[1:]))
eprint("Arguments: {}".format(args))
else:
sys.tracebacklimit = 0
# verify that age parameters are in the right format (number and units)
for ageParam in (args.snapshotAge, args.coldAge, args.closeAge, args.deleteAge):
if not ((ageParam == '0') or re.match(r'^\d+[dhms]$', ageParam)):
raise argparse.ArgumentTypeError(f'Invalid age parameter {ageParam}')
# store policy information
policyDict = dict()
policyDict['policy_id'] = args.policyId
policyDict['description'] = f'Index state management policy to snapshot indices after {args.snapshotAge}, move them into a cold state after {args.coldAge} and delete them after {args.deleteAge}'
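# last_updated_time is expressed in milliseconds since the epoch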
policyDict['last_updated_time'] = time.time_ns() // 1000000
policyDict['schema_version'] = args.schemaVersion
policyDict['error_notification'] = None
# list of states and their transitions
states = list()
# hot -> snapshot -> cold -> closed -> deleted
# hot state is default and always exists
policyDict['default_state'] = POLICY_STATE_HOT
hotState = dict()
hotState['name'] = POLICY_STATE_HOT
hotState['actions'] = [{ 'replica_count' : { 'number_of_replicas' : args.hotReplicaCount}}]
states.append(hotState)
# create a "snapshot" state for backup and set the previous state's transition to it
if (args.snapshotAge != '0'):
snapshotState = dict()
snapshotState['name'] = POLICY_STATE_SNAPSHOT
snapshotState['actions'] = [{ 'snapshot' : { 'repository' : args.snapshotRepo, 'snapshot' : args.snapshotName}}]
states[-1]['transitions'] = [{'state_name' : POLICY_STATE_SNAPSHOT,
'conditions' : { 'min_index_age' : args.snapshotAge}}]
states.append(snapshotState)
# create a "cold" state for read-only indices and set the previous state's transition to it
if (args.coldAge != '0'):
coldState = dict()
coldState['name'] = POLICY_STATE_COLD
coldState['actions'] = [{ 'read_only' : {}}]
states[-1]['transitions'] = [{'state_name' : POLICY_STATE_COLD,
'conditions' : { 'min_index_age' : args.coldAge}}]
states.append(coldState)
# create a "closed" state for closed indices and set the previous state's transition to it
if (args.closeAge != '0'):
closedState = dict()
closedState['name'] = POLICY_STATE_CLOSED
closedState['actions'] = [{ 'close' : {}}]
states[-1]['transitions'] = [{'state_name' : POLICY_STATE_CLOSED,
'conditions' : { 'min_index_age' : args.closeAge}}]
states.append(closedState)
# create a "deleted" state for deleted indices and set the previous state's transition to it
if (args.deleteAge != '0'):
deleteState = dict()
deleteState['name'] = POLICY_STATE_DELETE
deleteState['actions'] = [{ 'delete' : {}}]
states[-1]['transitions'] = [{'state_name' : POLICY_STATE_DELETE,
'conditions' : { 'min_index_age' : args.deleteAge}}]
states.append(deleteState)
# the final state doesn't transition
states[-1]['transitions'] = []
policyDict['states'] = states
policyDict['ism_template'] = { 'index_patterns' : [x.strip() for x in args.indexPattern.split(',')],
'priority' : args.templatePriority }
policy = dict()
policy['policy'] = policyDict
print(json.dumps(policy))
if __name__ == '__main__':
main()
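
A quick sanity check of the policy generator is to run it with explicit ages and inspect the emitted JSON; the invocation below is only a sketch, assuming the script is installed at /data/elastic_index_policy_create.py (the path used by the setup script later in this commit) and that jq is available.

# hypothetical invocation: snapshot after 1d, read-only after 30d, close after 60d, delete after 365d
/data/elastic_index_policy_create.py \
  --policy session_index_policy \
  --index-pattern 'sessions2-*' \
  --snapshot 1d --cold 30d --close 60d --delete 365d | jq '.policy.states[].name'
# prints the state names in transition order: "hot" "recent" "cold" "closed" "delete"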


@@ -0,0 +1,130 @@
#!/bin/bash
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.
set -euo pipefail
shopt -s nocasematch
if [[ -n ${ELASTICSEARCH_URL:-} ]]; then
ES_URL="$ELASTICSEARCH_URL"
elif [[ -n ${ES_HOST:-} ]] && [[ -n ${ES_PORT:-} ]]; then
ES_URL="http://$ES_HOST:$ES_PORT"
else
ES_URL="http://elasticsearch:9200"
fi
if [[ -n ${KIBANA_URL:-} ]]; then
KIB_URL="$KIBANA_URL"
elif [[ -n ${KIBANA_HOST:-} ]] && [[ -n ${KIBANA_PORT:-} ]]; then
KIB_URL="http://$KIBANA_HOST:$KIBANA_PORT"
else
KIB_URL="http://kibana:5601/kibana"
fi
INDEX_PATTERN=${ARKIME_INDEX_PATTERN:-"sessions2-*"}
INDEX_PATTERN_ID=${ARKIME_INDEX_PATTERN_ID:-"sessions2-*"}
INDEX_TIME_FIELD=${ARKIME_INDEX_TIME_FIELD:-"firstPacket"}
INDEX_POLICY_FILE="/data/init/index-management-policy.json"
INDEX_POLICY_FILE_HOST="/data/index-management-policy.json"
ZEEK_TEMPLATE_FILE="/data/init/zeek_template.json"
ZEEK_TEMPLATE_FILE_ORIG="/data/zeek_template.json"
INDEX_POLICY_NAME=${ISM_POLICY_NAME:-"session_index_policy"}
# is automatic creation of the Arkime session index enabled?
if [[ "${CREATE_ES_ARKIME_SESSION_INDEX:-}" = "true" ]] ; then
# give Elasticsearch time to start before configuring Kibana
/data/elastic_search_status.sh >/dev/null 2>&1
# is the kibana server up and responding to requests?
if curl -L --silent --output /dev/null --fail -XGET "$KIB_URL/api/status" ; then
# have we not already created the index pattern?
if ! curl -L --silent --output /dev/null --fail -XGET "$KIB_URL/api/saved_objects/index-pattern/$INDEX_PATTERN_ID" ; then
echo "Elasticsearch is running! Setting up index management policies..."
# register the repo location for elasticsearch snapshots
/data/register-elasticsearch-snapshot-repo.sh
# tweak the sessions template (sessions2-* zeek template file) to use the index management policy
if [[ -f "$INDEX_POLICY_FILE_HOST" ]] && (( $(jq length "$INDEX_POLICY_FILE_HOST") > 0 )); then
# user has provided a file for index management, use it
cp "$INDEX_POLICY_FILE_HOST" "$INDEX_POLICY_FILE"
INDEX_POLICY_NAME="$(cat "$INDEX_POLICY_FILE" | jq '..|objects|.policy_id//empty' | tr -d '"')"
else
# need to generate index management file based on environment variables
/data/elastic_index_policy_create.py \
--policy "$INDEX_POLICY_NAME" \
--index-pattern "$INDEX_PATTERN" \
--priority 100 \
--snapshot ${ISM_SNAPSHOT_AGE:-"0"} \
--cold ${ISM_COLD_AGE:-"0"} \
--close ${ISM_CLOSE_AGE:-"0"} \
--delete ${ISM_DELETE_AGE:-"0"} \
> "$INDEX_POLICY_FILE"
fi
if [[ -f "$INDEX_POLICY_FILE" ]]; then
# make API call to define index management policy
# https://opendistro.github.io/for-elasticsearch-docs/docs/ism/api/#create-policy
curl -w "\n" -L --silent --output /dev/null --show-error -XPUT -H "Content-Type: application/json" "$ES_URL/_opendistro/_ism/policies/$INDEX_POLICY_NAME" -d "@$INDEX_POLICY_FILE"
if [[ -f "$ZEEK_TEMPLATE_FILE_ORIG" ]]; then
# insert opendistro.index_state_management.policy_id into index template settings: will be
# imported by kibana-create-moloch-sessions-index.sh
cat "$ZEEK_TEMPLATE_FILE_ORIG" | jq ".settings += {\"opendistro.index_state_management.policy_id\": \"$INDEX_POLICY_NAME\"}" > "$ZEEK_TEMPLATE_FILE"
fi
fi
echo "Importing zeek_template..."
if [[ -f "$ZEEK_TEMPLATE_FILE_ORIG" ]] && [[ ! -f "$ZEEK_TEMPLATE_FILE" ]]; then
cp "$ZEEK_TEMPLATE_FILE_ORIG" "$ZEEK_TEMPLATE_FILE"
fi
# load zeek_template containing zeek field type mappings (merged from /data/zeek_template.json to /data/init/zeek_template.json in kibana_helpers.sh on startup)
curl -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" \
"$ES_URL/_template/zeek_template?include_type_name=true" -d "@$ZEEK_TEMPLATE_FILE" 2>&1
echo "Importing index pattern..."
# From https://github.com/elastic/kibana/issues/3709
# Create index pattern
curl -w "\n" -sSL --fail -XPOST -H "Content-Type: application/json" -H "kbn-xsrf: anything" \
"$KIB_URL/api/saved_objects/index-pattern/$INDEX_PATTERN_ID" \
-d"{\"attributes\":{\"title\":\"$INDEX_PATTERN\",\"timeFieldName\":\"$INDEX_TIME_FIELD\"}}" 2>&1
echo "Setting default index pattern..."
# Make it the default index
curl -w "\n" -sSL -XPOST -H "Content-Type: application/json" -H "kbn-xsrf: anything" \
"$KIB_URL/api/kibana/settings/defaultIndex" \
-d"{\"value\":\"$INDEX_PATTERN_ID\"}"
echo "Importing Kibana saved objects..."
# install default dashboards, index patterns, etc.
for i in /opt/kibana/dashboards/*.json; do
curl -L --silent --output /dev/null --show-error -XPOST "$KIB_URL/api/kibana/dashboards/import?force=true" -H 'kbn-xsrf:true' -H 'Content-type:application/json' -d "@$i"
done
# set dark theme
curl -L --silent --output /dev/null --show-error -XPOST "$KIB_URL/api/kibana/settings/theme:darkMode" -H 'kbn-xsrf:true' -H 'Content-type:application/json' -d '{"value":true}'
# set default query time range
curl -L --silent --output /dev/null --show-error -XPOST "$KIB_URL/api/kibana/settings" -H 'kbn-xsrf:true' -H 'Content-type:application/json' -d \
'{"changes":{"timepicker:timeDefaults":"{\n \"from\": \"now-24h\",\n \"to\": \"now\",\n \"mode\": \"quick\"}"}}'
# turn off telemetry
curl -L --silent --output /dev/null --show-error -XPOST "$KIB_URL/api/telemetry/v2/optIn" -H 'kbn-xsrf:true' -H 'Content-type:application/json' -d '{"enabled":false}'
# pin filters by default
curl -L --silent --output /dev/null --show-error -XPOST "$KIB_URL/api/kibana/settings/filters:pinnedByDefault" -H 'kbn-xsrf:true' -H 'Content-type:application/json' -d '{"value":true}'
echo "Kibana saved objects import complete!"
fi
fi
fi
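
Once the setup script has run, the stored policy and its effect on the sessions indices can be checked against the Open Distro ISM API; the commands below are a hedged sketch using the same default Elasticsearch URL and policy name as above.

# fetch the policy document created by the PUT above
curl -fsSL "http://elasticsearch:9200/_opendistro/_ism/policies/session_index_policy" | jq '.policy.states[].name'
# ask ISM which policy is managing the sessions indices and what state they are in
curl -fsSL "http://elasticsearch:9200/_opendistro/_ism/explain/sessions2-*" | jq .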


@@ -0,0 +1,291 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import json
import re
import requests
import os
import sys
GET_STATUS_API = 'api/status'
GET_INDEX_PATTERN_INFO_URI = 'api/saved_objects/_find'
GET_FIELDS_URI = 'api/index_patterns/_fields_for_wildcard'
PUT_INDEX_PATTERN_URI = 'api/saved_objects/index-pattern'
ES_GET_TEMPLATE_URI = '_template'
###################################################################################################
debug = False
scriptName = os.path.basename(__file__)
scriptPath = os.path.dirname(os.path.realpath(__file__))
origPath = os.getcwd()
###################################################################################################
# print to stderr
def eprint(*args, **kwargs):
print(*args, file=sys.stderr, **kwargs)
###################################################################################################
# convenient boolean argument parsing
def str2bool(v):
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
###################################################################################################
# main
def main():
global debug
parser = argparse.ArgumentParser(description=scriptName, add_help=False, usage='{} <arguments>'.format(scriptName))
parser.add_argument('-v', '--verbose', dest='debug', type=str2bool, nargs='?', const=True, default=False, help="Verbose output")
parser.add_argument('-i', '--index', dest='index', metavar='<str>', type=str, default='sessions2-*', help='Index Pattern Name')
parser.add_argument('-k', '--kibana', dest='kibanaUrl', metavar='<protocol://host:port>', type=str, default=os.getenv('KIBANA_URL', 'http://kibana:5601/kibana'), help='Kibana URL')
parser.add_argument('-e', '--elastic', dest='elasticUrl', metavar='<protocol://host:port>', type=str, default=os.getenv('ELASTICSEARCH_URL', 'http://elasticsearch:9200'), help='Elasticsearch URL')
parser.add_argument('-t', '--template', dest='template', metavar='<str>', type=str, default=None, help='Elasticsearch template to merge')
parser.add_argument('-n', '--dry-run', dest='dryrun', type=str2bool, nargs='?', const=True, default=False, help="Dry run (no PUT)")
try:
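# route argparse errors through parser.exit so a parse failure raises SystemExit and the full help is printed below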
parser.error = parser.exit
args = parser.parse_args()
except SystemExit:
parser.print_help()
exit(2)
debug = args.debug
if debug:
eprint(os.path.join(scriptPath, scriptName))
eprint("Arguments: {}".format(sys.argv[1:]))
eprint("Arguments: {}".format(args))
else:
sys.tracebacklimit = 0
# get version number so kibana doesn't think we're doing an XSRF when we do the PUT
statusInfoResponse = requests.get('{}/{}'.format(args.kibanaUrl, GET_STATUS_API))
statusInfoResponse.raise_for_status()
statusInfo = statusInfoResponse.json()
kibanaVersion = statusInfo['version']['number']
if debug:
eprint('Kibana version is {}'.format(kibanaVersion))
esInfoResponse = requests.get(args.elasticUrl)
esInfoResponse.raise_for_status()
esInfo = esInfoResponse.json()
elasticVersion = esInfo['version']['number']
if debug:
eprint('Elasticsearch version is {}'.format(elasticVersion))
# find the ID of the index name (probably will be the same as the name)
getIndexInfoResponse = requests.get(
'{}/{}'.format(args.kibanaUrl, GET_INDEX_PATTERN_INFO_URI),
params={
'type': 'index-pattern',
'fields': 'id',
'search': '"{}"'.format(args.index)
}
)
getIndexInfoResponse.raise_for_status()
getIndexInfo = getIndexInfoResponse.json()
indexId = getIndexInfo['saved_objects'][0]['id'] if (len(getIndexInfo['saved_objects']) > 0) else None
if debug:
eprint('Index ID for {} is {}'.format(args.index, indexId))
if indexId is not None:
# get the current fields list
getFieldsResponse = requests.get('{}/{}'.format(args.kibanaUrl, GET_FIELDS_URI),
params={ 'pattern': args.index,
'meta_fields': ["_source","_id","_type","_index","_score"] })
getFieldsResponse.raise_for_status()
getFieldsList = getFieldsResponse.json()['fields']
fieldsNames = [field['name'] for field in getFieldsList if 'name' in field]
# get the fields from the template, if specified, and merge those into the fields list
if args.template is not None:
try:
# request template from elasticsearch and pull the mappings/properties (field list) out
getTemplateResponse = requests.get('{}/{}/{}'.format(args.elasticUrl, ES_GET_TEMPLATE_URI, args.template))
getTemplateResponse.raise_for_status()
getTemplateInfo = getTemplateResponse.json()[args.template]['mappings']['properties']
# a field should be merged if it's not already in the list we have from kibana, and it's
# in the list of types we're merging (leave more complex types like nested and geolocation
# to be handled naturally as the data shows up)
for field in getTemplateInfo:
mergeFieldTypes = ("date", "float", "integer", "ip", "keyword", "long", "short", "text")
if ((field not in fieldsNames) and
('type' in getTemplateInfo[field]) and
(getTemplateInfo[field]['type'] in mergeFieldTypes)):
# create field dict in same format as those returned by GET_FIELDS_URI above
mergedFieldInfo = {}
mergedFieldInfo['name'] = field
mergedFieldInfo['esTypes'] = [ getTemplateInfo[field]['type'] ]
if ((getTemplateInfo[field]['type'] == 'float') or
(getTemplateInfo[field]['type'] == 'integer') or
(getTemplateInfo[field]['type'] == 'long') or
(getTemplateInfo[field]['type'] == 'short')):
mergedFieldInfo['type'] = 'number'
elif ((getTemplateInfo[field]['type'] == 'keyword') or
(getTemplateInfo[field]['type'] == 'text')):
mergedFieldInfo['type'] = 'string'
else:
mergedFieldInfo['type'] = getTemplateInfo[field]['type']
mergedFieldInfo['searchable'] = True
mergedFieldInfo['aggregatable'] = ("text" not in mergedFieldInfo['esTypes'])
mergedFieldInfo['readFromDocValues'] = mergedFieldInfo['aggregatable']
fieldsNames.append(field)
getFieldsList.append(mergedFieldInfo)
# elif debug:
# eprint('Not merging {}: {}'.format(field, json.dumps(getTemplateInfo[field])))
except Exception as e:
eprint('"{}" raised for "{}", skipping template merge'.format(str(e), args.template))
if debug:
eprint('{} would have {} fields'.format(args.index, len(getFieldsList)))
# define field formatting map for Kibana -> Arkime drilldown and other URL drilldowns
#
# see: https://github.com/cisagov/Malcolm/issues/133
# https://github.com/mmguero-dev/kibana-plugin-drilldownmenu
#
# fieldFormatMap is
# {
# "zeek.orig_h": {
# "id": "drilldown",
# "params": {
# "parsedUrl": {
# "origin": "https://malcolm.local.lan",
# "pathname": "/kibana/app/kibana",
# "basePath": "/kibana"
# },
# "urlTemplates": [
# null,
# {
# "url": "/idkib2mol/zeek.orig_h == {{value}}",
# "label": "Arkime: zeek.orig_h == {{value}}"
# }
# ]
# }
# },
# ...
# }
fieldFormatMap = {}
for field in getFieldsList:
if field['name'][:1].isalpha():
# for Arkime to query by database field name, see moloch issue/PR 1461/1463
valQuote = '"' if field['type'] == 'string' else ''
valDbPrefix = '' if field['name'].startswith('zeek') else 'db:'
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = '/idkib2mol/{}{} == {}{{{{value}}}}{}'.format(valDbPrefix, field['name'], valQuote, valQuote)
drilldownInfoParamsUrlTemplateValues['label'] = 'Arkime {}: {}{{{{value}}}}{}'.format(field['name'], valQuote, valQuote)
drilldownInfoParamsUrlTemplates = [None, drilldownInfoParamsUrlTemplateValues]
if (field['type'] == 'ip') or (re.search(r'[_\.-](h|ip)$', field['name'], re.IGNORECASE) is not None):
# add drilldown for searching VirusTotal for IP addresses
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.virustotal.com/en/ip-address/{{value}}/information/'
drilldownInfoParamsUrlTemplateValues['label'] = 'VirusTotal IP: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(^|[\b_\.-])(md5|sha(1|256|384|512))\b', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching VirusTotal for hash signatures
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.virustotal.com/gui/file/{{value}}/detection'
drilldownInfoParamsUrlTemplateValues['label'] = 'VirusTotal Hash: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(^|[\b_\.-])(hit|signature(_?id)?)s?$', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching the web for signature IDs
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://duckduckgo.com/?q="{{value}}"'
drilldownInfoParamsUrlTemplateValues['label'] = 'Web Search: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(^|src|dst|source|dest|destination|[\b_\.-])p(ort)?s?$', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching IANA for ports
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml?search={{value}}'
drilldownInfoParamsUrlTemplateValues['label'] = 'Port Registry: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'^(zeek\.service|protocols?|network\.protocol)$', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching IANA for services
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml?search={{value}}'
drilldownInfoParamsUrlTemplateValues['label'] = 'Service Registry: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'^(network\.transport|zeek\.proto|ipProtocol)$', field['name'], re.IGNORECASE) is not None:
# add URL link for assigned transport protocol numbers
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.iana.org/assignments/protocol-numbers/protocol-numbers.xhtml'
drilldownInfoParamsUrlTemplateValues['label'] = 'Protocol Registry'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(as\.number|(src|dst)ASN|asn\.(src|dst))$', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching ARIN for ASN
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://search.arin.net/rdap/?query={{value}}&searchFilter=asn'
drilldownInfoParamsUrlTemplateValues['label'] = 'ARIN ASN: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(^zeek\.filetype$|mime[_\.-]?type)', field['name'], re.IGNORECASE) is not None:
# add drilldown for searching mime/media/content types
# TODO: '/' in URL is getting messed up somehow, maybe we need to url encode it manually? not sure...
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = 'https://www.iana.org/assignments/media-types/{{value}}'
drilldownInfoParamsUrlTemplateValues['label'] = 'Media Type Registry: {{value}}'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
elif re.search(r'(^zeek_files\.extracted$)', field['name'], re.IGNORECASE) is not None:
# add download links for extracted (quarantined or preserved) zeek files
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = '/dl-extracted-files/quarantine/{{value}}'
drilldownInfoParamsUrlTemplateValues['label'] = 'Download (if quarantined)'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
drilldownInfoParamsUrlTemplateValues = {}
drilldownInfoParamsUrlTemplateValues['url'] = '/dl-extracted-files/preserved/{{value}}'
drilldownInfoParamsUrlTemplateValues['label'] = 'Download (if preserved)'
drilldownInfoParamsUrlTemplates.append(drilldownInfoParamsUrlTemplateValues)
drilldownInfoParams = {}
drilldownInfoParams['urlTemplates'] = drilldownInfoParamsUrlTemplates
drilldownInfo = {}
drilldownInfo['id'] = 'drilldown'
drilldownInfo['params'] = drilldownInfoParams
fieldFormatMap[field['name']] = drilldownInfo
# set the index pattern with our complete list of fields
putIndexInfo = {}
putIndexInfo['attributes'] = {}
putIndexInfo['attributes']['title'] = args.index
putIndexInfo['attributes']['fields'] = json.dumps(getFieldsList)
putIndexInfo['attributes']['fieldFormatMap'] = json.dumps(fieldFormatMap)
if not args.dryrun:
putResponse = requests.put('{}/{}/{}'.format(args.kibanaUrl, PUT_INDEX_PATTERN_URI, indexId),
headers={ 'Content-Type': 'application/json',
'kbn-xsrf': 'true',
'kbn-version': kibanaVersion, },
data=json.dumps(putIndexInfo))
putResponse.raise_for_status()
# if we got this far, it probably worked!
if args.dryrun:
print("success (dry run only, no write performed)")
else:
print("success")
else:
print("failure (could not find Index ID for {})".format(args.index))
if __name__ == '__main__':
main()
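
Since the script only writes back to Kibana when --dry-run is false, a safe first pass is a verbose dry run; the invocation below is illustrative only (the commit does not show the script's installed path, so the name index_refresh.py is a placeholder).

# hypothetical dry run: merge zeek_template fields into the sessions2-* index pattern without writing
./index_refresh.py --verbose true --dry-run true \
  --index 'sessions2-*' \
  --template zeek_template \
  --kibana http://kibana:5601/kibana \
  --elastic http://elasticsearch:9200
# on success this prints "success (dry run only, no write performed)" and performs no PUT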


@@ -0,0 +1,19 @@
#!/bin/bash
# Copyright (c) 2021 Battelle Energy Alliance, LLC. All rights reserved.
if [ $# -gt 0 ]; then
ES_URL="$1"
elif [[ -n $ELASTICSEARCH_URL ]]; then
ES_URL="$ELASTICSEARCH_URL"
elif [[ -n $ES_HOST ]] && [[ -n $ES_PORT ]]; then
ES_URL="http://$ES_HOST:$ES_PORT"
else
ES_URL="http://elasticsearch:9200"
fi
[[ -n $ISM_SNAPSHOT_REPO ]] && \
curl -w "\n" -H "Accept: application/json" \
-H "Content-type: application/json" \
-XPUT -fsSL "$ES_URL/_snapshot/$ISM_SNAPSHOT_REPO" \
-d "{ \"type\": \"fs\", \"settings\": { \"location\": \"$ISM_SNAPSHOT_REPO\", \"compress\": ${ISM_SNAPSHOT_COMPRESSED:-false} } }"