From df243d75da3e879a7aed057a94fc71820b6820ea Mon Sep 17 00:00:00 2001 From: Federico Gustavo Galland <99492720+f-galland@users.noreply.github.com> Date: Fri, 2 Aug 2024 04:48:14 -0300 Subject: [PATCH 1/5] Adding /var/run/wazuh-indexer to the list of ignored files (#339) --- distribution/packages/src/rpm/wazuh-indexer.rpm.spec | 1 + 1 file changed, 1 insertion(+) diff --git a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec index b8af1bfec204b..1f01e24667dc7 100644 --- a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec +++ b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec @@ -109,6 +109,7 @@ set -- "$@" "%%dir /usr/lib/tmpfiles.d" set -- "$@" "%%dir /usr/share" set -- "$@" "%%dir /var" set -- "$@" "%%dir /var/run" +set -- "$@" "%%dir /var/run/%{name}" set -- "$@" "%%dir /run" set -- "$@" "%%dir /var/lib" set -- "$@" "%%dir /var/log" From a7f758cc6a76a93e8793f1fd7dd15d7d37f25a17 Mon Sep 17 00:00:00 2001 From: Federico Gustavo Galland <99492720+f-galland@users.noreply.github.com> Date: Mon, 5 Aug 2024 09:55:31 -0300 Subject: [PATCH 2/5] Fix sysv files (#343) * Change sysv script to reference opensearch script * Correct permissions on sysv script * Updating sysv service file permissions to match 4.8 --- distribution/packages/src/rpm/init.d/wazuh-indexer | 2 +- distribution/packages/src/rpm/wazuh-indexer.rpm.spec | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/packages/src/rpm/init.d/wazuh-indexer b/distribution/packages/src/rpm/init.d/wazuh-indexer index 218519f204a64..c29a1068bdf88 100644 --- a/distribution/packages/src/rpm/init.d/wazuh-indexer +++ b/distribution/packages/src/rpm/init.d/wazuh-indexer @@ -47,7 +47,7 @@ if [ -f "$OPENSEARCH_ENV_FILE" ]; then . 
"$OPENSEARCH_ENV_FILE" fi -exec="$OPENSEARCH_HOME/bin/wazuh-indexer" +exec="$OPENSEARCH_HOME/bin/opensearch" prog="wazuh-indexer" pidfile="$PID_DIR/${prog}.pid" diff --git a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec index 1f01e24667dc7..aab6814472146 100644 --- a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec +++ b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec @@ -234,7 +234,7 @@ exit 0 # Service files %attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}.service %attr(0644, root, root) %{_prefix}/lib/systemd/system/%{name}-performance-analyzer.service -%attr(0644, root, root) %{_sysconfdir}/init.d/%{name} +%attr(0750, root, root) %{_sysconfdir}/init.d/%{name} %attr(0644, root, root) %config(noreplace) %{_prefix}/lib/sysctl.d/%{name}.conf %attr(0644, root, root) %config(noreplace) %{_prefix}/lib/tmpfiles.d/%{name}.conf From d7aecfe8d3d8fd1f3977dac168760253a5db92ff Mon Sep 17 00:00:00 2001 From: Federico Gustavo Galland <99492720+f-galland@users.noreply.github.com> Date: Mon, 5 Aug 2024 10:40:06 -0300 Subject: [PATCH 3/5] Moving logstash dockerfile and associated files to its own folder (#302) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * Moving logstash dockerfile and associated files to its own folder * Removing unused context from logstash container in splunk compose * Changing context in manager to splunk integration to newly created logstash folder * Using common logstash container for Security Lake integration * Removing unused dockerfiles * Adding gzip to logstash pipeline * Adding gzip decompression to lambda * Changing aws REGION variable name * Cleanup commented code * Adding environment variables for docker images versions * Adding logstash version to .env * Fixing minor issues in compose files * Removing certs configuration * Format YAML and Readme file from 3rd-party integrations * Remove unused code * Upgrade integrations to the latest Wazuh version * Fic environment variable * Remove unused code and fix ASL documentation --------- Co-authored-by: Álex Ruiz --- integrations/.gitignore | 3 +- integrations/README.md | 11 ++-- .../amazon-security-lake/CONTRIBUTING.md | 12 ++--- integrations/amazon-security-lake/Dockerfile | 53 +++++-------------- integrations/amazon-security-lake/README.md | 2 +- .../aws-lambda.dockerfile | 17 ------ .../amazon-security-lake/invoke-lambda.sh | 2 +- .../logstash/pipeline/indexer-to-file.conf | 34 ------------ .../logstash/pipeline/indexer-to-s3.conf | 20 ++++--- .../amazon-security-lake/logstash/setup.sh | 10 ---- .../src/lambda_function.py | 3 +- integrations/docker/.env | 25 +++++++-- .../docker/compose.amazon-security-lake.yml | 46 ++++++++-------- .../docker/compose.indexer-elastic.yml | 41 +++++++------- .../docker/compose.indexer-opensearch.yml | 32 ++++++----- .../docker/compose.indexer-splunk.yml | 35 ++++++------ .../docker/compose.manager-elastic.yml | 30 ++++++----- .../docker/compose.manager-opensearch.yml | 45 ++++++++++++++-- .../docker/compose.manager-splunk.yml | 26 +++++---- integrations/docker/config/certs.yml | 20 ------- integrations/elastic/README.md | 11 ++-- integrations/{elastic => logstash}/Dockerfile | 8 +-- integrations/{elastic => }/logstash/setup.sh | 0 integrations/opensearch/README.md | 1 + integrations/opensearch/opensearch.yml | 2 +- .../opensearch/opensearch_dashboards.yml | 4 +- integrations/splunk/README.md | 11 ++-- 27 files changed, 238 insertions(+), 266 deletions(-) delete mode 
100644 integrations/amazon-security-lake/aws-lambda.dockerfile delete mode 100644 integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf delete mode 100644 integrations/amazon-security-lake/logstash/setup.sh delete mode 100644 integrations/docker/config/certs.yml rename integrations/{elastic => logstash}/Dockerfile (74%) rename integrations/{elastic => }/logstash/setup.sh (100%) diff --git a/integrations/.gitignore b/integrations/.gitignore index a9728d8767e3a..49aa039cc7e32 100644 --- a/integrations/.gitignore +++ b/integrations/.gitignore @@ -1,2 +1,3 @@ external -docker/certs \ No newline at end of file +docker/certs +docker/config diff --git a/integrations/README.md b/integrations/README.md index 6adfb27c64305..bab4504f78324 100644 --- a/integrations/README.md +++ b/integrations/README.md @@ -14,14 +14,13 @@ and combines security data from AWS and a broad range of enterprise security dat Refer to these documents for more information about this integration: -* [User Guide](./amazon-security-lake/README.md). -* [Developer Guide](./amazon-security-lake/CONTRIBUTING.md). - +- [User Guide](./amazon-security-lake/README.md). +- [Developer Guide](./amazon-security-lake/CONTRIBUTING.md). ### Other integrations We host development environments to support the following integrations: -* [Splunk](./splunk/README.md). -* [Elasticsearch](./elastic/README.md). -* [OpenSearch](./opensearch/README.md). +- [Splunk](./splunk/README.md). +- [Elasticsearch](./elastic/README.md). +- [OpenSearch](./opensearch/README.md). diff --git a/integrations/amazon-security-lake/CONTRIBUTING.md b/integrations/amazon-security-lake/CONTRIBUTING.md index 6dc91e1ece047..1d8132d814c73 100644 --- a/integrations/amazon-security-lake/CONTRIBUTING.md +++ b/integrations/amazon-security-lake/CONTRIBUTING.md @@ -16,21 +16,18 @@ This Docker Compose project will bring up these services: - our [events generator](../tools/events-generator/README.md) - an AWS Lambda Python container. -On the one hand, the event generator will push events constantly to the indexer, to the `wazuh-alerts-4.x-sample` index by default (refer to the [events generator](../tools/events-generator/README.md) documentation for customization options). On the other hand, Logstash will query for new data and deliver it to output configured in the pipeline, which can be one of `indexer-to-s3` or `indexer-to-file`. +On the one hand, the event generator will push events constantly to the indexer, to the `wazuh-alerts-4.x-sample` index by default (refer to the [events generator](../tools/events-generator/README.md) documentation for customization options). On the other hand, Logstash will query for new data and deliver it to output configured in the pipeline `indexer-to-s3`. This pipeline delivers the data to an S3 bucket, from which the data is processed using a Lambda function, to finally be sent to the Amazon Security Lake bucket in Parquet format. -The `indexer-to-s3` pipeline is the method used by the integration. This pipeline delivers the data to an S3 bucket, from which the data is processed using a Lambda function, to finally be sent to the Amazon Security Lake bucket in Parquet format. 
- - -Attach a terminal to the container and start the integration by starting Logstash, as follows: +The pipeline starts automatically, but if you need to start it manually, attach a terminal to the Logstash container and start the integration using the command below: ```console -/usr/share/logstash/bin/logstash -f /usr/share/logstash/pipeline/indexer-to-s3.conf --path.settings /etc/logstash +/usr/share/logstash/bin/logstash -f /usr/share/logstash/pipeline/indexer-to-s3.conf ``` After 5 minutes, the first batch of data will show up in http://localhost:9444/ui/wazuh-aws-security-lake-raw. You'll need to invoke the Lambda function manually, selecting the log file to process. ```bash -bash amazon-security-lake/src/invoke-lambda.sh +bash amazon-security-lake/invoke-lambda.sh ``` Processed data will be uploaded to http://localhost:9444/ui/wazuh-aws-security-lake-parquet. Click on any file to download it, and check it's content using `parquet-tools`. Just make sure of installing the virtual environment first, through [requirements.txt](./requirements.txt). @@ -56,4 +53,3 @@ See [README.md](README.md). The instructions on that section have been based on **Docker is required**. The [Makefile](./Makefile) in this folder automates the generation of a zip deployment package containing the source code and the required dependencies for the AWS Lambda function. Simply run `make` and it will generate the `wazuh_to_amazon_security_lake.zip` file. The main target runs a Docker container to install the Python3 dependencies locally, and zips the source code and the dependencies together. - diff --git a/integrations/amazon-security-lake/Dockerfile b/integrations/amazon-security-lake/Dockerfile index 41fc87679734b..7039c2b935de8 100644 --- a/integrations/amazon-security-lake/Dockerfile +++ b/integrations/amazon-security-lake/Dockerfile @@ -1,46 +1,17 @@ -# MULTI-STAGE build +# docker build --platform linux/amd64 --no-cache -f aws-lambda.dockerfile -t docker-image:test . +# docker run --platform linux/amd64 -p 9000:8080 docker-image:test -FROM python:3.9 as builder -# Create a virtualenv for dependencies. This isolates these packages from -# system-level packages. -RUN python3 -m venv /env -# Setting these environment variables are the same as running -# source /env/bin/activate. -ENV VIRTUAL_ENV /env -ENV PATH /env/bin:$PATH -# Copy the application's requirements.txt and run pip to install all -# dependencies into the virtualenv. -COPY requirements.txt /app/requirements.txt -RUN pip install -r /app/requirements.txt +# FROM public.ecr.aws/lambda/python:3.9 +FROM amazon/aws-lambda-python:3.12 +# Copy requirements.txt +COPY requirements.aws.txt ${LAMBDA_TASK_ROOT} -FROM python:3.9 -ENV LOGSTASH_KEYSTORE_PASS="SecretPassword" -# Add the application source code. -COPY --chown=logstash:logstash ./src /home/app -# Add execution persmissions. -RUN chmod a+x /home/app/lambda_function.py -# Copy the application's dependencies. 
-COPY --from=builder /env /env +# Install the specified packages +RUN pip install -r requirements.aws.txt -# Install Logstash -RUN apt-get update && apt-get install -y iputils-ping wget gpg apt-transport-https -RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | gpg --dearmor -o /usr/share/keyrings/elastic-keyring.gpg && \ - echo "deb [signed-by=/usr/share/keyrings/elastic-keyring.gpg] https://artifacts.elastic.co/packages/8.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-8.x.list && \ - apt-get update && apt install -y logstash -# Install logstash-input-opensearch plugin. -RUN /usr/share/logstash/bin/logstash-plugin install logstash-input-opensearch -# Copy the Logstash's ingestion pipelines. -COPY --chown=logstash:logstash logstash/pipeline /usr/share/logstash/pipeline -# Grant logstash ownership over its files -RUN chown --recursive logstash:logstash /usr/share/logstash /etc/logstash /var/log/logstash /var/lib/logstash +# Copy function code +COPY src ${LAMBDA_TASK_ROOT} -USER logstash -# Copy and run the setup.sh script to create and configure a keystore for Logstash. -COPY --chown=logstash:logstash logstash/setup.sh /usr/share/logstash/bin/setup.sh -RUN bash /usr/share/logstash/bin/setup.sh - -# Disable ECS compatibility -RUN `echo "pipeline.ecs_compatibility: disabled" >> /etc/logstash/logstash.yml` - -WORKDIR /home/app \ No newline at end of file +# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) +CMD [ "lambda_function.lambda_handler" ] \ No newline at end of file diff --git a/integrations/amazon-security-lake/README.md b/integrations/amazon-security-lake/README.md index b071138049b8e..4fa22884909f2 100644 --- a/integrations/amazon-security-lake/README.md +++ b/integrations/amazon-security-lake/README.md @@ -90,7 +90,7 @@ Follow the [official documentation](https://docs.aws.amazon.com/lambda/latest/dg - Configure the runtime to have 512 MB of memory and 30 seconds timeout. - Configure a trigger so every object with `.txt` extension uploaded to the S3 bucket created previously invokes the Lambda. ![AWS Lambda trigger](./images/asl-lambda-trigger.jpeg) -- Use the [Makefile](./Makefile) to generate the zip package `wazuh_to_amazon_security_lake.zip`, and upload it to the S3 bucket created previously as per [these instructions](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-package.html#gettingstarted-package-zip). See [CONTRIBUTING](./CONTRIBUTING.md) for details about the Makefile. +- Use the [Makefile](./Makefile) to generate the zip package `wazuh_to_amazon_security_lake.zip`, and upload it to the S3 bucket created previously as per [these instructions](https://docs.aws.amazon.com/lambda/latest/dg/gettingstarted-package.html#gettingstarted-package-zip). See [CONTRIBUTING](./CONTRIBUTING.md) for details about the Makefile. - Configure the Lambda with the at least the required _Environment Variables_ below: | Environment variable | Required | Value | diff --git a/integrations/amazon-security-lake/aws-lambda.dockerfile b/integrations/amazon-security-lake/aws-lambda.dockerfile deleted file mode 100644 index 7039c2b935de8..0000000000000 --- a/integrations/amazon-security-lake/aws-lambda.dockerfile +++ /dev/null @@ -1,17 +0,0 @@ -# docker build --platform linux/amd64 --no-cache -f aws-lambda.dockerfile -t docker-image:test . 
-# docker run --platform linux/amd64 -p 9000:8080 docker-image:test - -# FROM public.ecr.aws/lambda/python:3.9 -FROM amazon/aws-lambda-python:3.12 - -# Copy requirements.txt -COPY requirements.aws.txt ${LAMBDA_TASK_ROOT} - -# Install the specified packages -RUN pip install -r requirements.aws.txt - -# Copy function code -COPY src ${LAMBDA_TASK_ROOT} - -# Set the CMD to your handler (could also be done as a parameter override outside of the Dockerfile) -CMD [ "lambda_function.lambda_handler" ] \ No newline at end of file diff --git a/integrations/amazon-security-lake/invoke-lambda.sh b/integrations/amazon-security-lake/invoke-lambda.sh index 7d2c379faae12..9ce9b05fbc2f0 100644 --- a/integrations/amazon-security-lake/invoke-lambda.sh +++ b/integrations/amazon-security-lake/invoke-lambda.sh @@ -39,4 +39,4 @@ curl -X POST "http://localhost:9000/2015-03-31/functions/function/invocations" - } } ] -}' \ No newline at end of file +}' diff --git a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf b/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf deleted file mode 100644 index 1bee9afc62450..0000000000000 --- a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-file.conf +++ /dev/null @@ -1,34 +0,0 @@ -input { - opensearch { - hosts => ["wazuh.indexer:9200"] - user => "${INDEXER_USERNAME}" - password => "${INDEXER_PASSWORD}" - ssl => true - ca_file => "/usr/share/logstash/root-ca.pem" - index => "wazuh-alerts-4.x-*" - query => '{ - "query": { - "range": { - "@timestamp": { - "gt": "now-1m" - } - } - } - }' - schedule => "* * * * *" - } -} - - -output { - stdout { - id => "output.stdout" - codec => json_lines - } - file { - id => "output.file" - path => "/var/log/logstash/indexer-to-file-%{+YYYY-MM-dd-HH}.log" - file_mode => 0644 - codec => json_lines - } -} diff --git a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-s3.conf b/integrations/amazon-security-lake/logstash/pipeline/indexer-to-s3.conf index a2446b4d9406e..f1acee7b5c45c 100644 --- a/integrations/amazon-security-lake/logstash/pipeline/indexer-to-s3.conf +++ b/integrations/amazon-security-lake/logstash/pipeline/indexer-to-s3.conf @@ -27,19 +27,27 @@ output { s3 { id => "output.s3" access_key_id => "${AWS_ACCESS_KEY_ID}" - secret_access_key => "${AWS_SECRET_ACCESS_KEY}" - region => "${AWS_REGION}" - endpoint => "${AWS_ENDPOINT}" bucket => "${S3_BUCKET_RAW}" codec => "json_lines" - retry_count => 0 - validate_credentials_on_root_bucket => false + encoding => "gzip" + endpoint => "${AWS_ENDPOINT}" prefix => "%{+YYYY}%{+MM}%{+dd}" + region => "${AWS_REGION}" + retry_count => 0 + secret_access_key => "${AWS_SECRET_ACCESS_KEY}" server_side_encryption => true server_side_encryption_algorithm => "AES256" + time_file => 5 + validate_credentials_on_root_bucket => false additional_settings => { "force_path_style" => true } - time_file => 5 + } + file { + id => "output.file" + path => "/usr/share/logstash/logs/indexer-to-file-%{+YYYY-MM-dd-HH}.log" + file_mode => 0644 + codec => json_lines + flush_interval => 30 } } diff --git a/integrations/amazon-security-lake/logstash/setup.sh b/integrations/amazon-security-lake/logstash/setup.sh deleted file mode 100644 index 9527f1fa58362..0000000000000 --- a/integrations/amazon-security-lake/logstash/setup.sh +++ /dev/null @@ -1,10 +0,0 @@ -#!/usr/bin/bash - -# This script creates and configures a keystore for Logstash to store -# indexer's credentials. NOTE: works only for dockerized logstash. 
-# Source: https://www.elastic.co/guide/en/logstash/current/keystore.html - -# Create keystore -/usr/share/logstash/bin/logstash-keystore create --path.settings /etc/logstash -echo "admin" | /usr/share/logstash/bin/logstash-keystore add INDEXER_USERNAME --path.settings /etc/logstash -echo "admin" | /usr/share/logstash/bin/logstash-keystore add INDEXER_PASSWORD --path.settings /etc/logstash diff --git a/integrations/amazon-security-lake/src/lambda_function.py b/integrations/amazon-security-lake/src/lambda_function.py index da6f90d6c17d6..e56caa4bf6426 100644 --- a/integrations/amazon-security-lake/src/lambda_function.py +++ b/integrations/amazon-security-lake/src/lambda_function.py @@ -2,6 +2,7 @@ import os import urllib.parse import json +import gzip import boto3 import pyarrow as pa import pyarrow.parquet as pq @@ -31,7 +32,7 @@ def get_events(bucket: str, key: str) -> list: logger.info(f"Reading {key}.") try: response = s3_client.get_object(Bucket=bucket, Key=key) - data = response['Body'].read().decode('utf-8') + data = gzip.decompress(response['Body'].read()).decode('utf-8') return data.splitlines() except ClientError as e: logger.error( diff --git a/integrations/docker/.env b/integrations/docker/.env index 00977dd69a894..1ffa39b78b108 100644 --- a/integrations/docker/.env +++ b/integrations/docker/.env @@ -4,9 +4,6 @@ ELASTIC_PASSWORD=elastic # Password for the 'kibana_system' user (at least 6 characters) KIBANA_PASSWORD=elastic -# Version of Elastic products -STACK_VERSION=8.6.2 - # Set the cluster name CLUSTER_NAME=elastic @@ -22,8 +19,26 @@ KIBANA_PORT=5602 # Increase or decrease based on the available host memory (in bytes) MEM_LIMIT=1073741824 +# Wazuh version +WAZUH_VERSION=4.8.1 + +# Wazuh Indexer version (Provisionally using OpenSearch) +WAZUH_INDEXER_VERSION=2.14.0 + +# Wazuh Dashboard version (Provisionally using OpenSearch Dashboards) +WAZUH_DASHBOARD_VERSION=2.14.0 + +# Wazuh certs generator version +WAZUH_CERTS_GENERATOR_VERSION=0.0.1 + # OpenSearch destination cluster version OS_VERSION=2.14.0 -# Wazuh version -WAZUH_VERSION=4.7.5 \ No newline at end of file +# Logstash version: +LOGSTASH_OSS_VERSION=8.9.0 + +# Splunk version: +SPLUNK_VERSION=9.1.4 + +# Version of Elastic products +STACK_VERSION=8.14.3 diff --git a/integrations/docker/compose.amazon-security-lake.yml b/integrations/docker/compose.amazon-security-lake.yml index 16ee907668a0d..8c9b610b97c6f 100644 --- a/integrations/docker/compose.amazon-security-lake.yml +++ b/integrations/docker/compose.amazon-security-lake.yml @@ -1,4 +1,3 @@ -version: "3.8" name: "amazon-security-lake" services: @@ -13,7 +12,7 @@ services: command: bash -c "python run.py -a wazuh.indexer" wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} container_name: wazuh.indexer depends_on: wazuh-certs-generator: @@ -22,6 +21,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} # - cluster.name=opensearch-cluster - node.name=wazuh.indexer - discovery.type=single-node @@ -56,7 +56,7 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh.dashboard: - image: opensearchproject/opensearch-dashboards:2.12.0 + image: opensearchproject/opensearch-dashboards:${WAZUH_DASHBOARD_VERSION} container_name: wazuh.dashboard depends_on: - wazuh.indexer @@ -71,20 +71,24 @@ services: - ./certs/wazuh.dashboard.pem:/usr/share/opensearch-dashboards/config/certs/opensearch.pem - 
./certs/root-ca.pem:/usr/share/opensearch-dashboards/config/certs/root-ca.pem environment: + WAZUH_DASHBOARD_VERSION: ${WAZUH_DASHBOARD_VERSION} OPENSEARCH_HOSTS: '["https://wazuh.indexer:9200"]' # Define the OpenSearch nodes that OpenSearch Dashboards will query - SERVER_SSL_ENABLED: 'true' - SERVER_SSL_KEY: '/usr/share/opensearch-dashboards/config/certs/opensearch.key' - SERVER_SSL_CERTIFICATE: '/usr/share/opensearch-dashboards/config/certs/opensearch.pem' - OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: '/usr/share/opensearch-dashboards/config/certs/root-ca.pem' + SERVER_SSL_ENABLED: "true" + SERVER_SSL_KEY: "/usr/share/opensearch-dashboards/config/certs/opensearch.key" + SERVER_SSL_CERTIFICATE: "/usr/share/opensearch-dashboards/config/certs/opensearch.pem" + OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: "/usr/share/opensearch-dashboards/config/certs/root-ca.pem" - wazuh.integration.security.lake: - image: wazuh/indexer-security-lake-integration - build: - context: ../amazon-security-lake - container_name: wazuh.integration.security.lake + logstash: depends_on: - wazuh.indexer - hostname: wazuh.integration.security.lake + # image: wazuh/indexer-security-lake-integration + image: logstash-oss:${LOGSTASH_OSS_VERSION} + build: + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} + # container_name: wazuh.integration.security.lake + # hostname: wazuh.integration.security.lake environment: LOG_LEVEL: trace LOGSTASH_KEYSTORE_PASS: "SecretPassword" @@ -100,11 +104,8 @@ services: - "5044:5044" - "9600:9600" volumes: - - ../amazon-security-lake/logstash/pipeline:/usr/share/logstash/pipeline # TODO has 1000:1000. logstash's uid is 999 + - ../amazon-security-lake/logstash/pipeline:/usr/share/logstash/pipeline - ./certs/root-ca.pem:/usr/share/logstash/root-ca.pem - - ../amazon-security-lake/src:/usr/share/logstash/amazon-security-lake # TODO use dedicated folder - # - ./credentials:/usr/share/logstash/.aws/credentials # TODO credentials are not commited (missing) - command: tail -f /var/log/logstash/logstash-plain.log s3.ninja: image: scireum/s3-ninja:latest @@ -119,13 +120,10 @@ services: image: wazuh/indexer-security-lake-integration:lambda build: context: ../amazon-security-lake - dockerfile: ../amazon-security-lake/aws-lambda.dockerfile - container_name: wazuh.integration.security.lake.aws.lambda - hostname: wazuh.integration.security.lake.aws.lambda environment: AWS_ACCESS_KEY_ID: "AKIAIOSFODNN7EXAMPLE" AWS_SECRET_ACCESS_KEY: "wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY" - AWS_REGION: "us-east-1" + REGION: "us-east-1" AWS_BUCKET: "wazuh-aws-security-lake-parquet" S3_BUCKET_OCSF: "wazuh-aws-security-lake-ocsf" AWS_ENDPOINT: "http://s3.ninja:9000" @@ -137,7 +135,7 @@ services: - ../amazon-security-lake/src:/var/task ports: - "9000:8080" - + generate-certs-config: image: alpine:latest volumes: @@ -159,12 +157,14 @@ services: " wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator depends_on: generate-certs-config: condition: service_completed_successfully container_name: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} entrypoint: sh -c "/entrypoint.sh; chown -R 1000:999 /certificates; chmod 740 /certificates; chmod 440 /certificates/*" volumes: - ./certs/:/certificates/ diff --git a/integrations/docker/compose.indexer-elastic.yml b/integrations/docker/compose.indexer-elastic.yml index 937d712deee35..fa4f20b8165b3 100644 
--- a/integrations/docker/compose.indexer-elastic.yml +++ b/integrations/docker/compose.indexer-elastic.yml @@ -11,7 +11,7 @@ services: command: bash -c "python run.py -a wazuh.indexer" wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -19,6 +19,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -51,7 +52,7 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh.dashboard: - image: opensearchproject/opensearch-dashboards:2.12.0 + image: opensearchproject/opensearch-dashboards:${WAZUH_DASHBOARD_VERSION} depends_on: - wazuh.indexer hostname: wazuh.dashboard @@ -65,12 +66,13 @@ services: - ./certs/wazuh.dashboard.pem:/usr/share/opensearch-dashboards/config/certs/opensearch.pem - ./certs/root-ca.pem:/usr/share/opensearch-dashboards/config/certs/root-ca.pem environment: + WAZUH_DASHBOARD_VERSION: ${WAZUH_DASHBOARD_VERSION} OPENSEARCH_HOSTS: '["https://wazuh.indexer:9200"]' # Define the OpenSearch nodes that OpenSearch Dashboards will query - SERVER_SSL_ENABLED: 'true' - SERVER_SSL_KEY: '/usr/share/opensearch-dashboards/config/certs/opensearch.key' - SERVER_SSL_CERTIFICATE: '/usr/share/opensearch-dashboards/config/certs/opensearch.pem' - OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: '/usr/share/opensearch-dashboards/config/certs/root-ca.pem' - + SERVER_SSL_ENABLED: "true" + SERVER_SSL_KEY: "/usr/share/opensearch-dashboards/config/certs/opensearch.key" + SERVER_SSL_CERTIFICATE: "/usr/share/opensearch-dashboards/config/certs/opensearch.pem" + OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: "/usr/share/opensearch-dashboards/config/certs/root-ca.pem" + generate-certs-config: image: alpine:latest volumes: @@ -92,8 +94,10 @@ services: " wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} depends_on: generate-certs-config: condition: service_completed_successfully @@ -102,7 +106,6 @@ services: - ./certs/:/certificates/ - ./config/certs.yml:/config/certs.yml - # ================================= # Elasticsearch, Kibana and Logstash # ================================= @@ -112,7 +115,7 @@ services: image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - es_certs:/usr/share/elasticsearch/config/certs - user: '0' + user: "0" command: > bash -c ' if [ x${ELASTIC_PASSWORD} == x ]; then @@ -158,11 +161,11 @@ services: echo "All done!"; ' healthcheck: - test: ['CMD-SHELL', '[ -f config/certs/es01/es01.crt ]'] + test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"] interval: 1s timeout: 5s retries: 120 - + es01: depends_on: setup: @@ -197,13 +200,13 @@ services: healthcheck: test: [ - 'CMD-SHELL', + "CMD-SHELL", "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'", ] interval: 10s timeout: 10s retries: 120 - + kibana: depends_on: es01: @@ -226,20 +229,22 @@ services: healthcheck: test: [ - 'CMD-SHELL', + "CMD-SHELL", "curl -s -I https://localhost:5601 | grep -q 'HTTP/1.1 302 Found'", ] interval: 10s timeout: 10s retries: 120 - + logstash: depends_on: es01: condition: service_healthy - image: logstash-oss:8.6.2 + image: 
logstash-oss:${LOGSTASH_OSS_VERSION} build: - context: ../elastic + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} environment: LOG_LEVEL: info MONITORING_ENABLED: false diff --git a/integrations/docker/compose.indexer-opensearch.yml b/integrations/docker/compose.indexer-opensearch.yml index ed878c8054b92..8fc2c4364117c 100644 --- a/integrations/docker/compose.indexer-opensearch.yml +++ b/integrations/docker/compose.indexer-opensearch.yml @@ -11,7 +11,7 @@ services: command: bash -c "python run.py -a wazuh.indexer" wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -19,6 +19,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -51,7 +52,7 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh.dashboard: - image: opensearchproject/opensearch-dashboards:2.12.0 + image: opensearchproject/opensearch-dashboards:${WAZUH_DASHBOARD_VERSION} depends_on: - wazuh.indexer hostname: wazuh.dashboard @@ -66,11 +67,12 @@ services: - ./certs/opensearch.dashboards.pem:/usr/share/opensearch-dashboards/config/certs/opensearch.pem - ./certs/root-ca.pem:/usr/share/opensearch-dashboards/config/certs/root-ca.pem environment: + WAZUH_DASHBOARD_VERSION: ${WAZUH_DASHBOARD_VERSION} OPENSEARCH_HOSTS: '["https://wazuh.indexer:9200"]' - SERVER_SSL_ENABLED: 'true' - SERVER_SSL_KEY: '/usr/share/opensearch-dashboards/config/certs/opensearch.key' - SERVER.SSL_CERTIFICATE: '/usr/share/opensearch-dashboards/config/certs/opensearch.pem' - OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: '/usr/share/opensearch-dashboards/config/certs/root-ca.pem' + SERVER_SSL_ENABLED: "true" + SERVER_SSL_KEY: "/usr/share/opensearch-dashboards/config/certs/opensearch.key" + SERVER.SSL_CERTIFICATE: "/usr/share/opensearch-dashboards/config/certs/opensearch.pem" + OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: "/usr/share/opensearch-dashboards/config/certs/root-ca.pem" generate-certs-config: image: alpine:latest @@ -97,8 +99,10 @@ services: " wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} depends_on: generate-certs-config: condition: service_completed_successfully @@ -107,7 +111,6 @@ services: - ./certs/:/certificates/ - ./config/certs.yml:/config/certs.yml - # ================================================ # OpenSearch, OpenSearch Dashboards and Logstash # ================================================ @@ -122,7 +125,7 @@ services: - node.name=opensearch.node - discovery.type=single-node - bootstrap.memory_lock=true - - 'OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m' + - "OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx512m" - "DISABLE_INSTALL_DEMO_CONFIG=true" volumes: - ../opensearch/opensearch.yml:/usr/share/opensearch/config/opensearch.yml @@ -142,7 +145,7 @@ services: healthcheck: test: [ - 'CMD-SHELL', + "CMD-SHELL", "curl -sku admin:admin https://opensearch.node:9200 2>&1 | grep -q 'The OpenSearch Project: https://opensearch.org/'", ] interval: 1s @@ -157,7 +160,7 @@ services: ports: - 5602:5601 expose: - - '5602' + - "5602" volumes: - ../opensearch/opensearch_dashboards.yml:/usr/share/opensearch-dashboards/config/opensearch_dashboards.yml 
- ./certs/:/usr/share/opensearch-dashboards/config/certs/ @@ -169,13 +172,16 @@ services: - 'OPENSEARCH_HOSTS="https://opensearch.node:9200"' logstash: - image: logstash-oss:8.6.2 + image: logstash-oss:${LOGSTASH_OSS_VERSION} depends_on: opensearch.node: condition: service_healthy build: - context: ../opensearch + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} environment: + LOGSTASH_OSS_VERSION: ${LOGSTASH_OSS_VERSION} LOG_LEVEL: info MONITORING_ENABLED: false volumes: diff --git a/integrations/docker/compose.indexer-splunk.yml b/integrations/docker/compose.indexer-splunk.yml index 1336575bb0e45..cf97d12b8a475 100644 --- a/integrations/docker/compose.indexer-splunk.yml +++ b/integrations/docker/compose.indexer-splunk.yml @@ -11,7 +11,7 @@ services: command: bash -c "python run.py -a wazuh.indexer" wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -19,6 +19,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -51,7 +52,7 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh.dashboard: - image: opensearchproject/opensearch-dashboards:2.12.0 + image: opensearchproject/opensearch-dashboards:${WAZUH_DASHBOARD_VERSION} depends_on: - wazuh.indexer hostname: wazuh.dashboard @@ -65,12 +66,13 @@ services: - ./certs/wazuh.dashboard.pem:/usr/share/opensearch-dashboards/config/certs/opensearch.pem - ./certs/root-ca.pem:/usr/share/opensearch-dashboards/config/certs/root-ca.pem environment: + WAZUH_DASHBOARD_VERSION: ${WAZUH_DASHBOARD_VERSION} OPENSEARCH_HOSTS: '["https://wazuh.indexer:9200"]' # Define the OpenSearch nodes that OpenSearch Dashboards will query - SERVER_SSL_ENABLED: 'true' - SERVER_SSL_KEY: '/usr/share/opensearch-dashboards/config/certs/opensearch.key' - SERVER_SSL_CERTIFICATE: '/usr/share/opensearch-dashboards/config/certs/opensearch.pem' - OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: '/usr/share/opensearch-dashboards/config/certs/root-ca.pem' - + SERVER_SSL_ENABLED: "true" + SERVER_SSL_KEY: "/usr/share/opensearch-dashboards/config/certs/opensearch.key" + SERVER_SSL_CERTIFICATE: "/usr/share/opensearch-dashboards/config/certs/opensearch.pem" + OPENSEARCH_SSL_CERTIFICATEAUTHORITIES: "/usr/share/opensearch-dashboards/config/certs/root-ca.pem" + generate-certs-config: image: alpine:latest volumes: @@ -92,8 +94,10 @@ services: " wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} depends_on: generate-certs-config: condition: service_completed_successfully @@ -102,7 +106,6 @@ services: - ./certs/:/certificates/ - ./config/certs.yml:/config/certs.yml - # ================================= # Splunk and Logstash # ================================= @@ -133,7 +136,7 @@ services: ' splunk: - image: splunk/splunk:9.0.4 + image: splunk/splunk:${SPLUNK_VERSION} volumes: - ./certs/splunk.key:/opt/splunk/etc/auth/custom/splunk.key - ./certs/splunk.pem:/opt/splunk/etc/auth/custom/splunk.pem @@ -146,12 +149,13 @@ services: generator: condition: service_completed_successfully ports: - - '8000:8000' - - '8088:8088' + - "8000:8000" + - "8088:8088" environment: + SPLUNK_VERSION: 
${SPLUNK_VERSION} SPLUNK_HEC_TOKEN: "abcd1234" SPLUNK_HOSTNAME: splunk - SPLUNK_HTTP_ENABLESSL: 'true' + SPLUNK_HTTP_ENABLESSL: "true" SPLUNK_PASSWORD: Password.1234 SPLUNK_STANDALONE_URL: https://splunk:8080 SPLUNK_START_ARGS: --accept-license @@ -160,10 +164,9 @@ services: depends_on: splunk: condition: service_healthy - image: logstash-oss:8.6.2 - build: - context: ../splunk + image: logstash-oss:${LOGSTASH_OSS_VERSION} environment: + LOGSTASH_OSS_VERSION: ${LOGSTASH_OSS_VERSION} LOG_LEVEL: info MONITORING_ENABLED: false volumes: diff --git a/integrations/docker/compose.manager-elastic.yml b/integrations/docker/compose.manager-elastic.yml index 12ee41527ee24..c299851001e6e 100644 --- a/integrations/docker/compose.manager-elastic.yml +++ b/integrations/docker/compose.manager-elastic.yml @@ -48,7 +48,7 @@ services: - alerts:/var/ossec/logs/alerts/ wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -56,6 +56,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -89,23 +90,28 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} entrypoint: sh -c "/entrypoint.sh; chown -R 1000:999 /certificates; chmod 740 /certificates; chmod 440 /certificates/*" volumes: - ./certs/:/certificates/ - ./config/certs.yml:/config/certs.yml - + logstash: depends_on: es01: condition: service_healthy wazuh-certs-generator: condition: service_completed_successfully - image: logstash-oss:8.6.2 + image: logstash-oss:${LOGSTASH_OSS_VERSION} build: - context: ../elastic + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} environment: + LOGSTASH_OSS_VERSION: ${LOGSTASH_OSS_VERSION} LOG_LEVEL: info MONITORING_ENABLED: false volumes: @@ -114,8 +120,6 @@ services: - alerts:/var/ossec/logs/alerts/ command: logstash -f /usr/share/logstash/pipeline/manager-to-elastic.conf - - # ================================= # Elasticsearch and Kibana # ================================= @@ -125,7 +129,7 @@ services: image: docker.elastic.co/elasticsearch/elasticsearch:${STACK_VERSION} volumes: - es_certs:/usr/share/elasticsearch/config/certs - user: '0' + user: "0" command: > bash -c ' if [ x${ELASTIC_PASSWORD} == x ]; then @@ -171,11 +175,11 @@ services: echo "All done!"; ' healthcheck: - test: ['CMD-SHELL', '[ -f config/certs/es01/es01.crt ]'] + test: ["CMD-SHELL", "[ -f config/certs/es01/es01.crt ]"] interval: 1s timeout: 5s retries: 120 - + es01: depends_on: setup: @@ -210,13 +214,13 @@ services: healthcheck: test: [ - 'CMD-SHELL', + "CMD-SHELL", "curl -s --cacert config/certs/ca/ca.crt https://localhost:9200 | grep -q 'missing authentication credentials'", ] interval: 10s timeout: 10s retries: 120 - + kibana: depends_on: es01: @@ -239,7 +243,7 @@ services: healthcheck: test: [ - 'CMD-SHELL', + "CMD-SHELL", "curl -s -I https://localhost:5601 | grep -q 'HTTP/1.1 302 Found'", ] interval: 10s diff --git a/integrations/docker/compose.manager-opensearch.yml b/integrations/docker/compose.manager-opensearch.yml index b7f1c64bb239e..52055b82f4511 100644 --- 
a/integrations/docker/compose.manager-opensearch.yml +++ b/integrations/docker/compose.manager-opensearch.yml @@ -14,6 +14,9 @@ services: wazuh.manager: image: wazuh/wazuh-manager:${WAZUH_VERSION} + depends_on: + wazuh-certs-generator: + condition: service_completed_successfully hostname: wazuh.manager restart: always ulimits: @@ -48,7 +51,7 @@ services: - alerts:/var/ossec/logs/alerts/ wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -56,6 +59,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -88,9 +92,38 @@ services: - ./certs/wazuh.indexer-key.pem:/usr/share/opensearch/config/wazuh.indexer-key.pem - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem + generate-certs-config: + image: alpine:latest + volumes: + - ./config:/config + command: | + sh -c " + echo ' + nodes: + indexer: + - name: wazuh.indexer + ip: \"wazuh.indexer\" + - name: opensearch.node + ip: \"opensearch.node\" + server: + - name: wazuh.manager + ip: \"wazuh.manager\" + dashboard: + - name: wazuh.dashboard + ip: \"wazuh.dashboard\" + - name: opensearch.dashboards + ip: \"opensearch.dashboards\" + ' > /config/certs.yml + " + wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} + depends_on: + generate-certs-config: + condition: service_completed_successfully entrypoint: sh -c "/entrypoint.sh; chown -R 1000:999 /certificates; chmod 740 /certificates; chmod 440 /certificates/*" volumes: - ./certs/:/certificates/ @@ -102,11 +135,13 @@ services: condition: service_healthy wazuh-certs-generator: condition: service_completed_successfully - image: logstash-oss:8.6.2 + image: logstash-oss:${LOGSTASH_OSS_VERSION} build: - dockerfile: ../elastic/Dockerfile - context: ../opensearch + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} environment: + LOGSTASH_OSS_VERSION: ${LOGSTASH_OSS_VERSION} LOG_LEVEL: info MONITORING_ENABLED: false volumes: diff --git a/integrations/docker/compose.manager-splunk.yml b/integrations/docker/compose.manager-splunk.yml index 532eb73374b7f..b029cd2224562 100644 --- a/integrations/docker/compose.manager-splunk.yml +++ b/integrations/docker/compose.manager-splunk.yml @@ -48,7 +48,7 @@ services: - alerts:/var/ossec/logs/alerts/ wazuh.indexer: - image: opensearchproject/opensearch:2.12.0 + image: opensearchproject/opensearch:${WAZUH_INDEXER_VERSION} depends_on: wazuh-certs-generator: condition: service_completed_successfully @@ -56,6 +56,7 @@ services: ports: - 9200:9200 environment: + - WAZUH_INDEXER_VERSION=${WAZUH_INDEXER_VERSION} - node.name=wazuh.indexer - discovery.type=single-node - bootstrap.memory_lock=true @@ -89,8 +90,10 @@ services: - ./certs/root-ca.pem:/usr/share/opensearch/config/root-ca.pem wazuh-certs-generator: - image: wazuh/wazuh-certs-generator:0.0.1 + image: wazuh/wazuh-certs-generator:${WAZUH_CERTS_GENERATOR_VERSION} hostname: wazuh-certs-generator + environment: + - WAZUH_CERTS_GENERATOR_VERSION=${WAZUH_CERTS_GENERATOR_VERSION} entrypoint: sh -c "/entrypoint.sh; chown -R 1000:999 /certificates; chmod 740 /certificates; chmod 440 /certificates/*" volumes: - 
./certs/:/certificates/ @@ -102,11 +105,13 @@ services: condition: service_healthy wazuh-certs-generator: condition: service_completed_successfully - image: logstash-oss:8.6.2 + image: logstash-oss:${LOGSTASH_OSS_VERSION} build: - dockerfile: ../elastic/Dockerfile - context: ../splunk + context: ../logstash + args: + - LOGSTASH_OSS_VERSION=${LOGSTASH_OSS_VERSION} environment: + LOGSTASH_OSS_VERSION: ${LOGSTASH_OSS_VERSION} LOG_LEVEL: info MONITORING_ENABLED: false volumes: @@ -145,7 +150,7 @@ services: ' splunk: - image: splunk/splunk:9.0.4 + image: splunk/splunk:${SPLUNK_VERSION} volumes: - ./certs/splunk.key:/opt/splunk/etc/auth/custom/splunk.key - ./certs/splunk.pem:/opt/splunk/etc/auth/custom/splunk.pem @@ -156,12 +161,13 @@ services: generator: condition: service_completed_successfully ports: - - '8000:8000' - - '8088:8088' + - "8000:8000" + - "8088:8088" environment: + SPLUNK_VERSION: ${SPLUNK_VERSION} SPLUNK_HEC_TOKEN: "abcd1234" SPLUNK_HOSTNAME: splunk - SPLUNK_HTTP_ENABLESSL: 'true' + SPLUNK_HTTP_ENABLESSL: "true" SPLUNK_PASSWORD: Password.1234 SPLUNK_STANDALONE_URL: https://splunk:8080 SPLUNK_START_ARGS: --accept-license @@ -169,4 +175,4 @@ services: volumes: data: es_certs: - alerts: \ No newline at end of file + alerts: diff --git a/integrations/docker/config/certs.yml b/integrations/docker/config/certs.yml deleted file mode 100644 index 7a4ef691e5e10..0000000000000 --- a/integrations/docker/config/certs.yml +++ /dev/null @@ -1,20 +0,0 @@ -nodes: - # Wazuh indexer and OpenSearch server nodes - indexer: - - name: wazuh.indexer - ip: wazuh.indexer - - name: opensearch.node - ip: opensearch.node - - # Wazuh server nodes - # Use node_type only with more than one Wazuh manager - server: - - name: wazuh.manager - ip: wazuh.manager - - # Wazuh dashboard and OpenSearch Dashboards nodes - dashboard: - - name: wazuh.dashboard - ip: wazuh.dashboard - - name: opensearch.dashboards - ip: opensearch.dashboards diff --git a/integrations/elastic/README.md b/integrations/elastic/README.md index 30d34b88b3a9f..3707586c959b5 100644 --- a/integrations/elastic/README.md +++ b/integrations/elastic/README.md @@ -34,16 +34,17 @@ For custom configurations, you may need to modify these files: - [elastic/logstash/pipeline/indexer-to-elastic.conf](./logstash/pipeline/indexer-to-elastic.conf): Logstash Pipeline configuration file. If you opted to start the integration with the Wazuh Manager, you can modify the following files: + - [docker/compose.manager-elastic.yml](../docker/compose.manager-elastic.yml): Docker Compose file. - [elastic/logstash/pipeline/manager-to-elastic.conf](./logstash/pipeline/manager-to-elastic.conf): Logstash Pipeline configuration file. Check the files above for **credentials**, ports, and other configurations. 
-| Service | Address | Credentials | -| ---------------- | ---------------------- | --------------- | -| Wazuh Indexer | https://localhost:9200 | admin:admin | -| Elastic | https://localhost:9201 | elastic:elastic | -| Kibana | https://localhost:5602 | elastic:elastic | +| Service | Address | Credentials | +| ------------- | ---------------------- | --------------- | +| Wazuh Indexer | https://localhost:9200 | admin:admin | +| Elastic | https://localhost:9201 | elastic:elastic | +| Kibana | https://localhost:5602 | elastic:elastic | ## Importing the dashboards diff --git a/integrations/elastic/Dockerfile b/integrations/logstash/Dockerfile similarity index 74% rename from integrations/elastic/Dockerfile rename to integrations/logstash/Dockerfile index 82314abd3c3bd..0c487bc7b2ca1 100644 --- a/integrations/elastic/Dockerfile +++ b/integrations/logstash/Dockerfile @@ -1,4 +1,5 @@ -FROM opensearchproject/logstash-oss-with-opensearch-output-plugin:latest +ARG LOGSTASH_OSS_VERSION +FROM opensearchproject/logstash-oss-with-opensearch-output-plugin:${LOGSTASH_OSS_VERSION} ENV LOGSTASH_KEYSTORE_PASS "SecretPassword" ENV LS_PATH "/usr/share/logstash" @@ -8,12 +9,11 @@ USER logstash # Install plugin RUN LS_JAVA_OPTS="-Xms1024m -Xmx1024m" logstash-plugin install logstash-input-opensearch -COPY --chown=logstash:logstash logstash/pipeline /usr/share/logstash/pipeline # Copy and run the setup.sh script to create and configure a keystore for Logstash. -COPY --chown=logstash:logstash logstash/setup.sh /usr/share/logstash/bin/setup.sh +COPY --chown=logstash:logstash ./setup.sh /usr/share/logstash/bin/setup.sh RUN bash /usr/share/logstash/bin/setup.sh # Disable ECS compatibility RUN `echo "pipeline.ecs_compatibility: disabled" >> /usr/share/logstash/config/logstash.yml` -WORKDIR /usr/share/logstash \ No newline at end of file +WORKDIR /usr/share/logstash diff --git a/integrations/elastic/logstash/setup.sh b/integrations/logstash/setup.sh similarity index 100% rename from integrations/elastic/logstash/setup.sh rename to integrations/logstash/setup.sh diff --git a/integrations/opensearch/README.md b/integrations/opensearch/README.md index 68e8c4b5693a6..6c55e62653b48 100644 --- a/integrations/opensearch/README.md +++ b/integrations/opensearch/README.md @@ -34,6 +34,7 @@ For custom configurations, you may need to modify these files: - [opensearch/logstash/pipeline/indexer-to-opensearch.conf](./logstash/pipeline/indexer-to-opensearch.conf): Logstash Pipeline configuration file. If you opted to start the integration with the Wazuh Manager, you can modify the following files: + - [docker/compose.manager-opensearch.yml](../docker/compose.manager-opensearch.yml): Docker Compose file. - [opensearch/logstash/pipeline/manager-to-opensearch.conf](./logstash/pipeline/manager-to-opensearch.conf): Logstash Pipeline configuration file. 
diff --git a/integrations/opensearch/opensearch.yml b/integrations/opensearch/opensearch.yml index d11190563f6b9..442c0b707f2ec 100644 --- a/integrations/opensearch/opensearch.yml +++ b/integrations/opensearch/opensearch.yml @@ -36,4 +36,4 @@ plugins.security.system_indices.indices: ".replication-metadata-store", ] plugins.security.allow_default_init_securityindex: true -cluster.routing.allocation.disk.threshold_enabled: false \ No newline at end of file +cluster.routing.allocation.disk.threshold_enabled: false diff --git a/integrations/opensearch/opensearch_dashboards.yml b/integrations/opensearch/opensearch_dashboards.yml index f0f0c1ad77543..316ebabcf1179 100644 --- a/integrations/opensearch/opensearch_dashboards.yml +++ b/integrations/opensearch/opensearch_dashboards.yml @@ -13,9 +13,9 @@ opensearch_security.readonly_mode.roles: ["kibana_read_only"] server.ssl.enabled: true server.ssl.key: "/usr/share/opensearch-dashboards/config/certs/opensearch.key" server.ssl.certificate: "/usr/share/opensearch-dashboards/config/certs/opensearch.pem" -opensearch.ssl.certificateAuthorities: ["/usr/share/opensearch-dashboards/config/certs/root-ca.pem"] +opensearch.ssl.certificateAuthorities: + ["/usr/share/opensearch-dashboards/config/certs/root-ca.pem"] opensearch.username: "kibanaserver" opensearch.password: "kibanaserver" opensearchDashboards.branding: useExpandedHeader: false - diff --git a/integrations/splunk/README.md b/integrations/splunk/README.md index 48fa738fc4aef..1e8be6a1e2996 100644 --- a/integrations/splunk/README.md +++ b/integrations/splunk/README.md @@ -33,21 +33,22 @@ For custom configurations, you may need to modify these files: - [splunk/logstash/pipeline/indexer-to-splunk.conf](./logstash/pipeline/indexer-to-splunk.conf): Logstash Pipeline configuration file. If you opted to start the integration with the Wazuh Manager, you can modify the following files: + - [docker/compose.manager-splunk.yml](../docker/compose.manager-splunk.yml): Docker Compose file. - [splunk/logstash/pipeline/manager-to-splunk.conf](./logstash/pipeline/manager-to-splunk.conf): Logstash Pipeline configuration file. Check the files above for **credentials**, ports, and other configurations. -| Service | Address | Credentials | -| ---------------- | ---------------------- | ------------------- | -| Wazuh Indexer | https://localhost:9200 | admin:admin | -| Splunk | https://localhost:8000 | admin:Password.1234 | +| Service | Address | Credentials | +| ------------- | ---------------------- | ------------------- | +| Wazuh Indexer | https://localhost:9200 | admin:admin | +| Splunk | https://localhost:8000 | admin:Password.1234 | ## Importing the dashboards The dashboards for Splunk are included in this folder. The steps to import them to Splunk are the following: -- In the Splunk UI, go to `Settings` > `Data Inputs` > `HTTP Event Collector` and make sure that the `hec` token is enabled and uses the `wazuh-alerts` index. +- In the Splunk UI, go to `Settings` > `Data Inputs` > `HTTP Event Collector` and make sure that the `hec` token is enabled and uses the `wazuh-alerts` index. - Open a dashboard file and copy all its content. - In the Splunk UI, navigate to `Search & Reporting`, `Dashboards`, click `Create New Dashboard`, write the title and select `Dashboard Studio`, select `Grid` and click on `Create`. - On the top menu, there is a `Source` icon. Click on it, and replace all the content with the copied content from the dashboard file. After that, click on `Back` and click on `Save`. 
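For reference, the end-to-end gzip handling that PATCH 3/5 introduces — the Logstash `s3` output uploading objects with `encoding => "gzip"`, and the Lambda decompressing them in `get_events()` — can be reduced to the short Python sketch below. This is illustrative only and not part of the patch series: the bucket name, object key, and local endpoint are placeholders taken from the development environment described above, not values mandated by the code.

```python
# Illustrative sketch of the gzip read path adopted by lambda_function.py in
# PATCH 3/5: fetch an object that the Logstash s3 output wrote with
# `encoding => "gzip"` and split the decompressed NDJSON payload into events.
import gzip

import boto3

# endpoint_url points at the local s3.ninja container used by the compose
# environment; drop it (and rely on real AWS credentials) outside that setup.
s3_client = boto3.client("s3", endpoint_url="http://localhost:9444")


def read_gzipped_events(bucket: str, key: str) -> list:
    """Return the JSON lines contained in a gzip-compressed S3 object."""
    response = s3_client.get_object(Bucket=bucket, Key=key)
    data = gzip.decompress(response["Body"].read()).decode("utf-8")
    return data.splitlines()


if __name__ == "__main__":
    # Placeholder bucket/key for local testing; adjust to an object that exists.
    events = read_gzipped_events("wazuh-aws-security-lake-raw", "20240805/example.txt")
    print(f"Read {len(events)} events")
```

Compressing before upload shrinks the raw-bucket objects, while `gzip.decompress` on the Lambda side keeps the rest of the pipeline unchanged, since the payload is still one JSON document per line.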
From 521f27c3793bc1d0d250a81a237dce08b28d0ffc Mon Sep 17 00:00:00 2001 From: Federico Gustavo Galland <99492720+f-galland@users.noreply.github.com> Date: Thu, 8 Aug 2024 05:32:56 -0300 Subject: [PATCH 4/5] Fixing chown command (#347) --- distribution/packages/src/rpm/wazuh-indexer.rpm.spec | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec index aab6814472146..affba09fcf4ca 100644 --- a/distribution/packages/src/rpm/wazuh-indexer.rpm.spec +++ b/distribution/packages/src/rpm/wazuh-indexer.rpm.spec @@ -176,8 +176,8 @@ exit 0 %post set -e -chown -R %{name}.%{name} %{config_dir} -chown -R %{name}.%{name} %{log_dir} +chown -R %{name}:%{name} %{config_dir} +chown -R %{name}:%{name} %{log_dir} # Apply PerformanceAnalyzer Settings chmod a+rw /tmp From 9fd1835bba77ae04d48550eb4dc9be4787070806 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C3=81lex=20Ruiz?= Date: Fri, 30 Aug 2024 10:54:10 +0200 Subject: [PATCH 5/5] Add Changelog for 4.9.0 (#364) --- CHANGELOG.md | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 95ad15737ba87..38621c6b6c644 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,13 +3,18 @@ All notable changes to this project are documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). See the [CONTRIBUTING guide](./CONTRIBUTING.md#Changelog) for instructions on how to add changelog entries. -## [Unreleased 2.x] +## [Unreleased 4.9.x] ### Added +- [Amazon Security Lake integration as source](https://github.com/wazuh/wazuh-indexer/issues/128) +- [Splunk, OpenSearch and Elastic integrations](https://github.com/wazuh/wazuh-indexer/issues/250) + ### Dependencies ### Changed +- [Wazuh indexer fork update](https://github.com/wazuh/wazuh-indexer/issues/54) + ### Deprecated ### Removed @@ -18,4 +23,4 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), ### Security -[Unreleased 2.x]: https://github.com/opensearch-project/OpenSearch/compare/2.12...2.x +[Unreleased 4.9.x]: https://github.com/wazuh/wazuh-indexer/compare/BASE...4.9.0