
Commit

Merge branch 'ait-aecid:development' into 230_russellmitchell
ernstleierzopf committed Mar 17, 2024
2 parents f9432a8 + 0fb2d6b commit ea46a83
Showing 9 changed files with 253 additions and 176 deletions.
Dockerfile: 2 changes (0 additions, 2 deletions)
@@ -44,7 +44,6 @@ RUN apt-get update && apt-get install -y \
python3-pylibacl \
python3-urllib3 \
python3-statsmodels \
python3-pandas \
python3-patsy \
python3-numpy \
python3-defusedxml \
@@ -97,7 +96,6 @@ RUN ln -s /usr/lib/logdata-anomaly-miner/aminerremotecontrol.py /usr/bin/aminerr
&& ln -s /usr/lib/python3/dist-packages/urllib3 /usr/lib/logdata-anomaly-miner/urllib3 \
&& ln -s /usr/lib/python3/dist-packages/statsmodels /usr/lib/logdata-anomaly-miner/statsmodels \
&& ln -s /usr/lib/python3/dist-packages/packaging /usr/lib/logdata-anomaly-miner/packaging \
&& ln -s /usr/lib/python3/dist-packages/pandas /etc/aminer/conf-enabled/pandas \
&& ln -s /usr/lib/python3/dist-packages/patsy /etc/aminer/conf-enabled/patsy \
&& ln -s /usr/lib/python3/dist-packages/defusedxml /etc/aminer/conf-enabled/defusedxml \
&& groupadd -g $GID -o $UNAME && useradd -u $UID -g $GID -ms /usr/sbin/nologin $UNAME && mkdir -p /var/lib/aminer/logs \
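A side note on the Dockerfile change above: a quick way to verify that the image still ships the remaining symlinked Python modules after dropping pandas is an import smoke test. The snippet below is an illustrative sketch only (not part of this commit); it assumes it is executed inside the built container, and the module list is taken from the packages that remain in the Dockerfile.

```python
# Illustrative import smoke test (not part of this commit).
# Module names correspond to packages still installed/symlinked in the
# Dockerfile above; "pandas" is intentionally missing from the list.
import importlib

remaining_modules = ["urllib3", "statsmodels", "patsy", "numpy", "defusedxml"]

for name in remaining_modules:
    importlib.import_module(name)  # raises ImportError if a module is absent
    print(f"{name}: OK")
```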
Jenkinsfile: 74 changes (40 additions, 34 deletions)
@@ -26,8 +26,13 @@ pipeline {
sh "docker build -f aecid-testsuite/Dockerfile -t aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID ."
}
}
stage("Static Analysis & Basic Functionality") {
stage("Testing") {
parallel {
stage("Declarative: Static Analysis & Basic Functionality") {
steps {
sh "echo \"Running static analysis & basic functionality tests.\""
}
}
stage("Mypy"){
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runMypy"
@@ -63,17 +68,17 @@ pipeline {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runOfflineMode"
}
}
}
}

stage("Unittests") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runUnittests"
}
}
stage("Unittests") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runUnittests"
}
}

stage("Aminer Demo Tests") {
parallel {
stage("Declarative: Aminer Demo Tests") {
steps {
sh "echo \"Running AMiner demo tests.\""
}
}
stage("demo-config.py") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runAminerDemo demo/aminer/demo-config.py"
@@ -109,11 +114,11 @@
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runAminerEncodingDemo demo/aminer/demo-config.yml"
}
}
}
}

stage("JSON/XML Input Tests") {
parallel {
stage("Declarative: JSON/XML Input Tests") {
steps {
sh "echo \"Running JSON/XML input tests.\""
}
}
stage("JSON Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runAminerJsonInputDemo"
@@ -124,47 +129,48 @@
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runAminerXmlInputDemo"
}
}
stage("Aminer") {
stage("AMiner Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/json-aminer-demo.yml"
}
}
stage("Elastic") {
stage("Elastic Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/json-elastic-demo.yml"
}
}
stage("Eve") {
stage("Eve Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/json-eve-demo.yml"
}
}
stage("Journal") {
stage("Journal Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/json-journal-demo.yml"
}
}
stage("Wazuh") {
stage("Wazuh Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/json-wazuh-demo.yml"
}
}
stage("Windows") {
stage("Windows Input Demo") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runJsonDemo demo/aminerJsonInputDemo/windows.yml"
}
}
}
}

stage("System, Documentation and Wiki Tests") {
parallel {
stage("Declarative: System, Documentation and Wiki Tests") {
steps {
sh "echo \"Running system, documentation and wiki tests.\""
}
}
stage("Available Configs") {
steps {
sh "docker run -m=2G --rm aecid/logdata-anomaly-miner-testing:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID runConfAvailableTest"
}
}
stage("Debian Bookworm") {
stage("Debian Bookworm Docker") {
steps {
script {
debianbookwormimage = true
@@ -179,7 +185,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-debian-bookworm:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Debian Bullseye") {
stage("Debian Bullseye Docker") {
steps {
script {
debianbullseyeimage = true
@@ -194,7 +200,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-debian-bullseye:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Test Debian Buster") {
stage("Debian Buster Docker") {
steps {
script {
debianbusterimage = true
@@ -209,7 +215,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-debian-buster:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Test Production Docker Image") {
stage("Production Docker Image") {
steps {
script {
productionimage = true
@@ -224,7 +230,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-production:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Test Ubuntu 22.04") {
stage("Ubuntu 22.04 Docker") {
when {
expression {
BRANCH_NAME == "main" || BRANCH_NAME == "development"
@@ -244,7 +250,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-ubuntu-2204:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Test Ubuntu 20.04") {
stage("Ubuntu 20.04 Docker") {
when {
expression {
BRANCH_NAME == "main" || BRANCH_NAME == "development"
@@ -264,7 +270,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-ubuntu-2004:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("Fedora") {
stage("Fedora Docker") {
steps {
script {
fedoraimage = true
@@ -279,7 +285,7 @@
sh "docker run -v $PWD/persistency:/var/lib/aminer -v $PWD/logs:/logs --rm -t aecid/aminer-fedora:$JOB_BASE_NAME-$EXECUTOR_NUMBER-$BUILD_ID aminer"
}
}
stage("RedHat") {
stage("RedHat Docker") {
steps {
script {
redhatimage = true
README.md: 7 changes (4 additions, 3 deletions)
@@ -6,7 +6,9 @@ This tool parses log data and allows to define analysis pipelines for anomaly de

## Requirements

In order to install logdata-anomaly-miner a **Linux system** with **python >= 3.6** is required. **Debian-based** distributions are currently recommended.
In order to install logdata-anomaly-miner, a **Linux system** with **python >= 3.6** is required. All **Ubuntu** and **Debian** versions covered by our tests are currently recommended.
Support for **Fedora** is only experimental.
More specifically, the tested systems include Debian Buster, Debian Bullseye, Debian Bookworm, Ubuntu 20.04, Ubuntu 22.04, Fedora (Docker image fedora:latest), and RedHat (Docker image redhat/ubi9).

_See [requirements.txt](https://github.com/ait-aecid/logdata-anomaly-miner/requirements.txt) for further module dependencies_

@@ -15,8 +17,7 @@ _See [requirements.txt](https://github.com/ait-aecid/logdata-anomaly-miner/requi

### Debian

There are Debian packages for logdata-anomaly-miner in the official Debian/Ubuntu
repositories.
There are Debian packages for logdata-anomaly-miner in the official Debian/Ubuntu repositories.

```
apt-get update && apt-get install logdata-anomaly-miner
```
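Once the package is installed, a trivial way to confirm that the `aminer` executable ended up on the PATH is sketched below. This check is illustrative only and not part of the README change; it only assumes that the package provides an `aminer` command, as used elsewhere in this commit's Jenkinsfile.

```python
# Illustrative post-install check (not part of this commit): confirm that the
# "aminer" executable from the Debian package is available on the PATH.
import shutil

aminer_path = shutil.which("aminer")
if aminer_path is None:
    print("aminer not found on PATH - is the package installed?")
else:
    print(f"aminer found at {aminer_path}")
```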
aecid-testsuite/docker/Dockerfile_deb: 1 change (1 addition, 0 deletions)
@@ -17,6 +17,7 @@ LABEL maintainer="wolfgang.hotwagner@ait.ac.at"

# Install necessary debian packages
ARG DEBIAN_FRONTEND=noninteractive
RUN apt-get update && apt-get install -y --no-install-recommends apt-utils
RUN apt-get update && apt-get install -y \
ansible \
git \
(changed file: MultiLocaleDateTimeModelElement unit test)
@@ -78,14 +78,6 @@ def test1get_match_element_with_different_date_formats(self):
match_element = multi_locale_dtme.get_match_element(self.path, match_context)
self.compare_match_results(data, match_element, match_context, self.id_ + "/format4", self.path, date, 1549497600, None)

# test with only time defined. Here obviously the seconds can not be tested.
data = b"11:40:23: it still works"
date = b"11:40:23"
match_context = DummyMatchContext(data)
match_element = multi_locale_dtme.get_match_element(self.path, match_context)
self.compare_match_results(
data, match_element, match_context, self.id_ + "/format6", self.path, date, match_element.match_object, None)

data = b"Feb 25 something happened"
date = b"Feb 25"
match_context = DummyMatchContext(data)
@@ -131,18 +123,32 @@ def test1get_match_element_with_different_date_formats(self):
match_element = multi_locale_dtme.get_match_element(self.path, match_context)
self.compare_match_results(data, match_element, match_context, self.id_ + "/format12", self.path, date, 1618326774.201, None)

multi_locale_dtme = MultiLocaleDateTimeModelElement(self.id_, [
(b"%d.%m.%Y %H:%M:%S.%f", None, None), (b"%d.%m.%Y %H:%M:%S%z", None, None), (b"%d.%m.%Y %H:%M:%S", None, None),
(b"%d.%m.%YT%H:%M:%S", None, None), (b"%d.%m.%Y", None, None), (b"%H:%M:%S:%f", None, de_at_utf8),
(b"%H:%M:%S", None, None), (b"%b %d", tz_gmt10, de_at_utf8), (b"%d %b %Y", None, en_gb_utf8),
(b"%dth %b %Y", None, en_gb_utf8), (b"%d/%m/%Y", None, en_gb_utf8), (b"%m-%d-%Y", None, en_us_utf8),
(b"%d.%m. %H:%M:%S:%f", None, de_at_utf8)])
multi_locale_dtme.latest_parsed_timestamp = None
# Austrian time no date
data = b"15:12:54:201 something happened"
date = b"15:12:54:201"
match_context = DummyMatchContext(data)
match_element = multi_locale_dtme.get_match_element(self.path, match_context)
dtm = datetime(2021, datetime.now().month, datetime.now().day, 15, 12, 54, 201, tzinfo=timezone.utc)
dtm = datetime(datetime.now().year, datetime.now().month, datetime.now().day, 15, 12, 54, 201, tzinfo=timezone.utc)
# total_seconds should be in UTC, so the timezones are parsed out.
delta = (dtm - datetime(1970, 1, 1, tzinfo=dtm.tzinfo))
total_seconds = delta.days * 86400 + delta.seconds + delta.microseconds / 1000
self.compare_match_results(data, match_element, match_context, self.id_ + "/format5", self.path, date, total_seconds, None)

# test with only time defined. Here obviously the seconds can not be tested.
data = b"11:40:23: it still works"
date = b"11:40:23"
match_context = DummyMatchContext(data)
match_element = multi_locale_dtme.get_match_element(self.path, match_context)
self.compare_match_results(
data, match_element, match_context, self.id_ + "/format6", self.path, date, match_element.match_object, None)

def test2wrong_date(self):
"""Test if wrong input data does not return a match."""
tz_gmt10 = pytz.timezone("Etc/GMT+10")
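A note on the year fix in the test above: a time-only input such as b"15:12:54:201" carries no date, so the expected timestamp must be derived from the current date rather than a hardcoded year (previously 2021), otherwise the assertion breaks as soon as the calendar year changes. The following is a minimal standalone sketch of that epoch-offset computation, using only the standard library; it is not part of the commit, the variable names are illustrative, and fractional seconds are omitted for simplicity.

```python
# Illustrative sketch (not part of this commit) of building the expected
# UTC epoch timestamp for a time-of-day input that contains no date.
from datetime import datetime, timezone

now = datetime.now()
# The parsed input carries no date, so "today" has to be assumed - both by the
# parser and by the expected value; hardcoding a year (e.g. 2021) would make
# the test fail in any other year.
expected = datetime(now.year, now.month, now.day, 15, 12, 54, tzinfo=timezone.utc)

epoch = datetime(1970, 1, 1, tzinfo=timezone.utc)
seconds_since_epoch = (expected - epoch).total_seconds()
print(seconds_since_epoch)
```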
