first commit
data/apidemo-cron/build/Dockerfile (new file, 9 lines)
@@ -0,0 +1,9 @@
FROM ubuntu:22.04

RUN apt-get update && apt-get -y upgrade
RUN apt-get -y install cron curl jq dos2unix

COPY entrypoint.sh /opt/entrypoint.sh
RUN dos2unix /opt/entrypoint.sh ; chmod +x /opt/entrypoint.sh

CMD ["sh", "/opt/entrypoint.sh"]
data/apidemo-cron/build/entrypoint.sh (new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/bash
dos2unix $COMMAND

echo "$SCHEDULE $USER $COMMAND" > /etc/cron.d/api-cronjob
chmod 0644 /etc/cron.d/api-cronjob
crontab /etc/cron.d/api-cronjob
touch /var/log/cron.log
env > /etc/environment && cron -f
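Note: the entrypoint reads SCHEDULE, USER and COMMAND from the container environment and writes them into /etc/cron.d/api-cronjob. A minimal sketch of how the image might be started (image name, mount point and values are illustrative, not part of this commit):

    docker run -d \
      -e SCHEDULE="*/5 * * * *" \
      -e USER=root \
      -e COMMAND=/opt/scripts/get_cryptocurrency.sh \
      -v ./data/apidemo-cron/scripts:/opt/scripts \
      apidemo-cron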
data/apidemo-cron/scripts/get_cryptocurrency.sh (new file, 5 lines)
@@ -0,0 +1,5 @@
#!/bin/bash
DATE=`date +"%Y-%m-%d"`
curl https://api.coindesk.com/v1/bpi/currentprice.json > /tmp/cryptocurrency.json
jq -c 'del(.disclaimer)' /tmp/cryptocurrency.json >> /opt/output/cryptocurrency_$DATE.json
find /opt/output/ -mtime +5 -delete
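The script appends one compact JSON document per run to a per-day file under /opt/output/ and deletes files older than five days. A hedged read-back example (the field path follows the CoinDesk response format; the date in the filename is illustrative):

    jq -r '.bpi.USD.rate' /opt/output/cryptocurrency_2024-01-01.json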
data/apidemo-filebeat/config/filebeat.yml (new file, 13 lines)
@@ -0,0 +1,13 @@
filebeat.inputs:
- type: log
  enabled: true
  paths:
    - ${INPUT_PATH}

filebeat.config.modules:
  path: ${path.config}/modules.d/*.yml
  reload.enabled: false

output.logstash:
  enabled: true
  hosts: ["${LOGSTASH_HOST}"]
data/apidemo-logstash/config/logstash.conf (new file, 26 lines)
@@ -0,0 +1,26 @@
input {
  beats {
    port => 5044
  }
}

filter {
  json {
    source => "message"
  }
}

output {
  #stdout {}
  #file {
  #  path => "/tmp/output.json"
  #}
  opensearch {
    hosts => ["${OPENSEARCH_HOST}"]
    index => "${OPENSEARCH_INDEX}-%{+YYYY-MM-dd}"
    user => "${LOGSTASH_USER}"
    password => "${LOGSTASH_PASSWORD}"
    ssl => true
    ssl_certificate_verification => false
  }
}
data/beats-logstash/config/logstash.conf (new file, 23 lines)
@@ -0,0 +1,23 @@
input {
  beats {
    port => 5044
  }
}

filter {
}

output {
  #stdout {}
  #file {
  #  path => "/tmp/output.json"
  #}
  opensearch {
    hosts => ["${OPENSEARCH_HOST}"]
    index => "${OPENSEARCH_INDEX}-%{+YYYY-MM-dd}"
    user => "${LOGSTASH_USER}"
    password => "${LOGSTASH_PASSWORD}"
    ssl => true
    ssl_certificate_verification => false
  }
}
data/mdns/build/Dockerfile (new file, 9 lines)
@@ -0,0 +1,9 @@
FROM ubuntu:22.04

RUN apt-get update && apt-get -y upgrade
RUN apt-get -y install avahi-utils libnss-mdns dos2unix

ADD entrypoint.sh /opt/entrypoint.sh
RUN dos2unix /opt/entrypoint.sh ; chmod +x /opt/entrypoint.sh

CMD ["sh", "/opt/entrypoint.sh"]
data/mdns/build/entrypoint.sh (new file, 15 lines)
@@ -0,0 +1,15 @@
#!/bin/bash

service dbus start
service avahi-daemon start

dos2unix /opt/config/names.csv

while read LINE; do
  PUBLISH_HOSTNAME=$(echo $LINE | cut -d ";" -f 1)
  PUBLISH_IP=$(echo $LINE | cut -d ";" -f 2)
  echo "$PUBLISH_HOSTNAME - $PUBLISH_IP"
  /usr/bin/avahi-publish -a -R $PUBLISH_HOSTNAME $PUBLISH_IP &
done < /opt/config/names.csv

tail -f /dev/null
data/mdns/config/names.csv (new file, 3 lines)
@@ -0,0 +1,3 @@
opensearch.local;192.168.57.2
traefik.local;192.168.57.2
grafana.local;192.168.57.2
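With the avahi-publish loop from the entrypoint running, the published records can be checked from any mDNS-capable host on the same network, for example (assumes avahi-utils on the querying host):

    avahi-resolve -n opensearch.local
    avahi-resolve -n traefik.local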
data/opensearch-node1/config/internal_users_example.yml (new file, 59 lines)
@@ -0,0 +1,59 @@
---
# This is the internal user database
# The hash value is a bcrypt hash and can be generated with /usr/share/opensearch/plugins/opensearch-security/tools/hash.sh

_meta:
  type: "internalusers"
  config_version: 2

admin:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: true
  backend_roles:
  - "admin"
  description: "Demo admin user"

anomalyadmin:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: false
  opendistro_security_roles:
  - "anomaly_full_access"
  description: "Demo anomaly admin user, using internal role"

kibanaserver:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: true
  description: "Demo OpenSearch Dashboards user"

kibanaro:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: false
  backend_roles:
  - "kibanauser"
  - "readall"
  attributes:
    attribute1: "value1"
    attribute2: "value2"
    attribute3: "value3"
  description: "Demo OpenSearch Dashboards read only user, using external role mapping"

logstash:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: false
  backend_roles:
  - "logstash"
  description: "Demo logstash user, using external role mapping"

readall:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: false
  backend_roles:
  - "readall"
  description: "Demo readall user, using external role mapping"

snapshotrestore:
  hash: "$2y$12$x22en27Ec7WS8OmtW1MxMeu7l0GHHrSwEn3HMH/o4JcKeeAQ.UGFK"
  reserved: false
  backend_roles:
  - "snapshotrestore"
  description: "Demo snapshotrestore user, using external role mapping"
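All users above reuse the same demo bcrypt hash. A replacement hash can be generated with the tool mentioned in the header comment, for example from inside one of the OpenSearch containers (the container name is assumed from the directory layout, the password is a placeholder):

    docker exec -it opensearch-node1 /usr/share/opensearch/plugins/opensearch-security/tools/hash.sh -p 'NewPassword123'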
data/opensearch-node1/config/opensearch.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
cluster.name: docker-cluster
network.host: 0.0.0.0
plugins.security.authcz.admin_dn:
  - "CN=admin,O=security,L=IT,ST=NY,C=US"
plugins.security.nodes_dn:
  - "CN=opensearch-node*"
plugins.security.ssl.transport.enforce_hostname_verification: false
plugins.security.ssl.transport.resolve_hostname: false
plugins.security.ssl.http.enabled: true
plugins.security.allow_unsafe_democertificates: true
plugins.security.allow_default_init_securityindex: true
plugins.security.audit.type: internal_opensearch
plugins.security.enable_snapshot_restore_privilege: true
plugins.security.check_snapshot_restore_write_privileges: true
plugins.security.restapi.roles_enabled: ["all_access", "security_rest_api_access"]
plugins.security.system_indices.enabled: true
plugins.security.system_indices.indices: [".plugins-ml-model", ".plugins-ml-task", ".opendistro-alerting-config", ".opendistro-alerting-alert*", ".opendistro-anomaly-results*", ".opendistro-anomaly-detector*", ".opendistro-anomaly-checkpoints", ".opendistro-anomaly-detection-state", ".opendistro-reports-*", ".opensearch-notifications-*", ".opensearch-notebooks", ".opensearch-observability", ".opendistro-asynchronous-search-response*", ".replication-metadata-store"]
data/opensearch-node2/config/opensearch.yml (new file, 17 lines)
@@ -0,0 +1,17 @@
cluster.name: docker-cluster
network.host: 0.0.0.0
plugins.security.authcz.admin_dn:
  - "CN=admin,O=security,L=IT,ST=NY,C=US"
plugins.security.nodes_dn:
  - "CN=opensearch-node*"
plugins.security.ssl.transport.enforce_hostname_verification: false
plugins.security.ssl.transport.resolve_hostname: false
plugins.security.ssl.http.enabled: true
plugins.security.allow_unsafe_democertificates: true
plugins.security.allow_default_init_securityindex: true
plugins.security.audit.type: internal_opensearch
plugins.security.enable_snapshot_restore_privilege: true
plugins.security.check_snapshot_restore_write_privileges: true
plugins.security.restapi.roles_enabled: ["all_access", "security_rest_api_access"]
plugins.security.system_indices.enabled: true
plugins.security.system_indices.indices: [".plugins-ml-model", ".plugins-ml-task", ".opendistro-alerting-config", ".opendistro-alerting-alert*", ".opendistro-anomaly-results*", ".opendistro-anomaly-detector*", ".opendistro-anomaly-checkpoints", ".opendistro-anomaly-detection-state", ".opendistro-reports-*", ".opensearch-notifications-*", ".opensearch-notebooks", ".opensearch-observability", ".opendistro-asynchronous-search-response*", ".replication-metadata-store"]
data/setup/build/01_precreate_folders.sh (new file, 30 lines)
@@ -0,0 +1,30 @@
#!/bin/bash

mkdir /data/graylog-mongodb/configdb
mkdir /data/graylog-mongodb/db
chmod 777 /data/graylog-mongodb/configdb
chmod 777 /data/graylog-mongodb/db

if [ ! -f /data/.env ]
then
  cp /data/env_example /data/.env
fi

if [ ! -f /data/opensearch-node1/config/internal_users.yml ]
then
  cp /data/opensearch-node1/config/internal_users_example.yml /data/opensearch-node1/config/internal_users.yml
  mkdir /data/opensearch-node2/config/
  cp /data/opensearch-node1/config/internal_users_example.yml /data/opensearch-node2/config/internal_users.yml
fi

if [ ! -d "/data/opensearch-node1/data/" ]
then
  echo "creating opensearch node1 data directory"
  mkdir -p /data/opensearch-node1/data/
fi

if [ ! -d "/data/opensearch-node2/data/" ]
then
  echo "creating opensearch node2 data directory"
  mkdir -p /data/opensearch-node2/data/
fi
data/setup/build/02_generate_certificates.sh (new file, 109 lines)
@@ -0,0 +1,109 @@
#!/bin/bash

if [ ! -f /data/certificates/certs/opensearch-ca.key ]
then
  echo "generating CA"
  mkdir -p /data/certificates/certs/
  openssl genrsa -out /data/certificates/certs/opensearch-ca.key 2048
  openssl req -new -x509 -sha256 -days 3650 -subj "/C=US/ST=NY/L=IT/O=security/CN=opensearch-ca" -key /data/certificates/certs/opensearch-ca.key -out /data/certificates/certs/opensearch-ca.pem
  openssl x509 -noout -subject -in /data/certificates/certs/opensearch-ca.pem
fi

if [ ! -f /data/certificates/certs/opensearch-admin.key ]
then
  echo "generating admin user key"
  mkdir -p /data/certificates/certs/
  openssl genrsa -out /data/certificates/certs/opensearch-admin_rsa.key 2048
  openssl pkcs8 -v1 PBE-SHA1-3DES -nocrypt -in /data/certificates/certs/opensearch-admin_rsa.key -topk8 -out /data/certificates/certs/opensearch-admin.key
  openssl req -new -inform PEM -outform PEM -subj "/C=US/ST=NY/L=IT/O=security/CN=admin" -key /data/certificates/certs/opensearch-admin.key -out /data/certificates/certs/opensearch-admin.csr
  openssl x509 -req -days 3650 -in /data/certificates/certs/opensearch-admin.csr -CA /data/certificates/certs/opensearch-ca.pem -CAkey /data/certificates/certs/opensearch-ca.key -CAcreateserial -sha256 -out /data/certificates/certs/opensearch-admin.pem
  #openssl verify -CAfile /data/certificates/certs/opensearch-ca.pem /data/certificates/certs/opensearch-admin.pem
  #openssl x509 -noout -subject -in /data/certificates/certs/opensearch-admin.pem
fi

if [ ! -f /data/opensearch-node1/certs/opensearch-node1.key ]
then
  for NODE_NAME in "node1" "node2"
  do
    echo "generating certificate opensearch-$NODE_NAME"
    mkdir -p /data/opensearch-$NODE_NAME/certs/

    cat << EOF > /tmp/request.conf
[req]
distinguished_name = req_distinguished_name
req_extensions = v3_req
prompt = no
[req_distinguished_name]
C = US
ST = NY
L = IT
O = security
CN = opensearch-$NODE_NAME
[v3_req]
keyUsage = keyEncipherment, dataEncipherment, digitalSignature, nonRepudiation
extendedKeyUsage = serverAuth, clientAuth
subjectAltName = @alt_names
[alt_names]
DNS.1 = docker-cluster
DNS.2 = opensearch-$NODE_NAME
RID.1 = 1.2.3.4.5.5
EOF

    openssl genrsa -out /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME-rsa.key 2048
    openssl pkcs8 -inform PEM -outform PEM -in /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME-rsa.key -topk8 -nocrypt -v1 PBE-SHA1-3DES -out /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.key
    openssl req -new -config /tmp/request.conf -key /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.key -out /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.csr
    openssl x509 -req -days 3650 -extfile /tmp/request.conf -extensions v3_req -in /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.csr -CA /data/certificates/certs/opensearch-ca.pem -CAkey /data/certificates/certs/opensearch-ca.key -CAcreateserial -sha256 -out /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.pem

    cp /data/certificates/certs/opensearch-ca.pem /data/opensearch-$NODE_NAME/certs/
    cp /data/certificates/certs/opensearch-admin.pem /data/opensearch-$NODE_NAME/certs/
    cp /data/certificates/certs/opensearch-admin.key /data/opensearch-$NODE_NAME/certs/

    #openssl verify -CAfile /data/opensearch-$NODE_NAME/certs/opensearch-ca.pem /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.pem
    #openssl x509 -text -in /data/opensearch-$NODE_NAME/certs/opensearch-$NODE_NAME.pem
  done
fi

if [ ! -f /data/traefik/certs/traefik.key ]
then
  echo "generating certificate traefik"
  mkdir -p /data/traefik/certs/

  cat << EOF > /tmp/request.conf
[req]
distinguished_name = req_distinguished_name
req_extensions = v3_req
prompt = no
[req_distinguished_name]
C = US
ST = NY
L = IT
O = security
CN = opensearch-lab
[v3_req]
keyUsage = keyEncipherment, dataEncipherment, digitalSignature, nonRepudiation
extendedKeyUsage = serverAuth, clientAuth
subjectAltName = @alt_names
[alt_names]
DNS.1 = traefik.local
DNS.2 = opensearch.local
DNS.3 = grafana.local
EOF

  ##openssl req -new -newkey rsa:2048 -days 365 -nodes -x509 -keyout /data/traefik/certs/server.key -out /data/traefik/certs/server.crt -subj "/C=US/ST=NY/L=IT/O=security/CN=logger"
  #openssl genrsa -out /data/traefik/certs/traefik_rsa.key 2048
  #openssl pkcs8 -inform PEM -outform PEM -in /data/traefik/certs/traefik_rsa.key -topk8 -nocrypt -v1 PBE-SHA1-3DES -out /data/traefik/certs/traefik.key
  #openssl req -new -subj "/C=US/ST=NY/L=IT/O=security/CN=traefik" -key /data/traefik/certs/traefik.key -out /data/traefik/certs/traefik.csr
  #openssl x509 -req -days 3650 -in /data/traefik/certs/traefik.csr -CA /data/certificates/certs/opensearch-ca.pem -CAkey /data/certificates/certs/opensearch-ca.key -CAcreateserial -sha256 -out /data/traefik/certs/traefik.pem
  #openssl verify -CAfile /data/certificates/certs/opensearch-ca.pem /data/traefik/certs/traefik.pem
  #openssl x509 -noout -subject -in /data/traefik/certs/traefik.pem

  openssl genrsa -out /data/traefik/certs/server_rsa.key 2048
  openssl pkcs8 -inform PEM -outform PEM -in /data/traefik/certs/server_rsa.key -topk8 -nocrypt -v1 PBE-SHA1-3DES -out /data/traefik/certs/server.key
  openssl req -new -config /tmp/request.conf -key /data/traefik/certs/server.key -out /data/traefik/certs/server.csr
  openssl x509 -req -days 3650 -extfile /tmp/request.conf -extensions v3_req -in /data/traefik/certs/server.csr -CA /data/certificates/certs/opensearch-ca.pem -CAkey /data/certificates/certs/opensearch-ca.key -CAcreateserial -sha256 -out /data/traefik/certs/server.pem

  #openssl verify -CAfile /data/traefik/certs/server.pem /data/traefik/certs/server.pem
  #openssl x509 -text -in /data/traefik/certs/server.pem
fi

sleep 2
data/setup/build/03_configure_opensearch.sh (new file, 42 lines)
@@ -0,0 +1,42 @@
#!/bin/bash

## run security_admin in each node
#for NODE_NAME in "node1" "node2"
#do
#
#  COMMAND=(docker exec -it opensearch-$NODE_NAME /usr/share/opensearch/plugins/opensearch-security/tools/securityadmin.sh --clustername opensearch-cluster --configdir /usr/share/opensearch/config/opensearch-security -cacert /usr/share/opensearch/config/certs/opensearch-ca.pem -key /usr/share/opensearch/config/certs/opensearch-admin.key -cert /usr/share/opensearch/config/certs/opensearch-admin.pem -h opensearch-$NODE_NAME)
#
#  until "${COMMAND[@]}" ; do
#    echo "opensearch not up yet. retrying in 10 seconds..."
#    sleep 10
#  done
#done

# use the opensearch-dashboards API to create the index pattern logstash-* for the global tenant until it succeeds (this will not create it for your personal tenant)
cat > /tmp/opensearch_create_index_pattern.sh << EOF
curl -k \
  -X POST "http://opensearch-dashboards:5601/api/saved_objects/index-pattern/logstash-*" \
  -u 'admin:vagrant' \
  -H "securitytenant:global" \
  -H "osd-xsrf:true" \
  -H "content-type:application/json" \
  -d "{ \"attributes\": { \"title\": \"logstash-*\", \"timeFieldName\": \"@timestamp\" } }"
EOF

cat > /tmp/opensearch_check_index_pattern.sh << EOF
curl -k \
  -X GET "http://opensearch-dashboards:5601/api/saved_objects/index-pattern/logstash-*" \
  -u 'admin:vagrant' \
  -H "securitytenant:global" \
  -H "osd-xsrf:true" \
  -H "content-type:application/json" \
  | grep "namespace"
EOF
chmod +x /tmp/opensearch_*.sh

until "/tmp/opensearch_check_index_pattern.sh" ; do
  echo "opensearch index-pattern does not exist; trying to create logstash-*"
  /tmp/opensearch_create_index_pattern.sh
  sleep 10
done
echo "opensearch index-pattern created"
data/setup/build/04_configure_grafana.sh (new file, 56 lines)
@@ -0,0 +1,56 @@
#!/bin/bash

cat > /tmp/grafana_check.sh << EOF
curl -k \
  -X GET "http://grafana:3000/api/datasources" \
  -u 'admin:vagrant' \
  -H "content-type:application/json" \
  | grep '"name":"OpenSearch"'
EOF

cat > /tmp/grafana_initial_setup.sh << EOF
curl -k \
  -X POST "http://grafana:3000/api/datasources" \
  -u 'admin:vagrant' \
  -H "content-type:application/json" \
  -d '
{
  "orgId": 1,
  "name": "OpenSearch",
  "type": "grafana-opensearch-datasource",
  "typeName": "OpenSearch",
  "typeLogoUrl": "public/plugins/grafana-opensearch-datasource/img/logo.svg",
  "access": "proxy",
  "url": "https://opensearch-node1:9200",
  "basicAuth": true,
  "basicAuthUser": "admin",
  "isDefault": true,
  "secureJsonData": {
    "basicAuthPassword": "vagrant"
  },
  "jsonData": {
    "database": "logstash-*",
    "esVersion": "8.0.0",
    "flavor": "opensearch",
    "logLevelField": "fields.level",
    "logMessageField": "message",
    "maxConcurrentShardRequests": 5,
    "pplEnabled": true,
    "timeField": "@timestamp",
    "tlsAuthWithCACert": false,
    "tlsSkipVerify": true,
    "version": "1.0.0"
  },
  "readOnly": false
}
'
EOF

chmod +x /tmp/grafana*.sh

until "/tmp/grafana_check.sh" ; do
  echo "Grafana settings not applied; retrying"
  /tmp/grafana_initial_setup.sh
  sleep 10
done
echo "Grafana settings applied"
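The check script greps the datasource list for "OpenSearch"; an equivalent manual check from inside the compose network might look like this (host, port and credentials are taken from the script above, the jq filter is illustrative):

    curl -s -k -u 'admin:vagrant' http://grafana:3000/api/datasources | jq -r '.[].name'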
data/setup/build/Dockerfile (new file, 9 lines)
@@ -0,0 +1,9 @@
FROM ubuntu:22.04

RUN apt-get update && apt-get -y upgrade
RUN apt-get -y install openssl docker.io curl dos2unix

COPY *.sh /opt/
RUN chmod +x /opt/*.sh ; dos2unix /opt/*.sh

CMD ["bash", "/opt/entrypoint.sh"]
data/setup/build/entrypoint.sh (new file, 11 lines)
@@ -0,0 +1,11 @@
#!/bin/bash

/opt/01_precreate_folders.sh
/opt/02_generate_certificates.sh
echo "initial setup done. tag setup container as healthy to start other containers"
touch /tmp/healthcheck.txt

/opt/03_configure_opensearch.sh
/opt/04_configure_grafana.sh

sleep infinity
data/syslog-filebeat/config/filebeat.yml (new file, 51 lines)
@@ -0,0 +1,51 @@
# for more modules visit https://www.elastic.co/guide/en/beats/filebeat/current/filebeat-modules-overview.html

filebeat.inputs:
- type: udp
  max_message_size: 10KiB
  host: "0.0.0.0:514"
  tags: ["udp-514"]
- type: tcp
  max_message_size: 10MiB
  host: "0.0.0.0:514"
  tags: ["tcp-514"]

filebeat.modules:
#- module: cisco
#  asa:
#    var.syslog_host: 0.0.0.0
#    var.syslog_port: 9001
#    var.log_level: 5
#
#- module: cisco
#  ios:
#    var.syslog_host: 0.0.0.0
#    var.syslog_port: 9002
#    var.log_level: 5
#
#- module: cef
#  log:
#    var.syslog_host: 0.0.0.0
#    var.syslog_port: 9003
#
#- module: checkpoint
#  firewall:
#    var.syslog_host: 0.0.0.0
#    var.syslog_port: 9004
#
- module: netflow
  log:
    enabled: true
    var:
      netflow_host: 0.0.0.0
      netflow_port: 2055
      tags: ["netflow"]

#- module: snort
#  snort:
#    var.syslog_host: 0.0.0.0
#    var.syslog_port: 9532

output.logstash:
  enabled: true
  hosts: ["${LOGSTASH_HOST}"]
data/syslog-logstash/config/logstash.conf (new file, 28 lines)
@@ -0,0 +1,28 @@
input {
  beats {
    port => 5044
  }
}

filter {
  grok {
    match => ["message", "<%{DATA:event_priority}>%{SYSLOGTIMESTAMP:syslog_timestamp} %{SYSLOGHOST:syslog_hostname} %{GREEDYDATA:syslog_process}\[%{NUMBER:syslog_uid}\]: %{DATA:SYSLOGMESSAGE}"]
    add_tag => [ "syslog" ]
  }

}

output {
  #stdout {}
  #file {
  #  path => "/tmp/output.json"
  #}
  opensearch {
    hosts => ["${OPENSEARCH_HOST}"]
    index => "${OPENSEARCH_INDEX}-%{+YYYY-MM-dd}"
    user => "${LOGSTASH_USER}"
    password => "${LOGSTASH_PASSWORD}"
    ssl => true
    ssl_certificate_verification => false
  }
}
data/traefik/config/encryption.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
[tls.stores]
  [tls.stores.default]
    [tls.stores.default.defaultCertificate]
      certFile = "/etc/traefik/certs/server.pem"
      keyFile = "/etc/traefik/certs/server.key"

[[tls.certificates]]
  certFile = "/etc/traefik/certs/server.pem"
  keyFile = "/etc/traefik/certs/server.key"