diff --git a/.gitignore b/.gitignore
index 23bb5db..e204fb9 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,15 +2,18 @@
/docs/build/
/docs/lib/
/docs/bin/
-
# Byte-compiled / optimized / DLL files
__pycache__/
-*.py[cod]
+*.py[codz]
*$py.class
+# C extensions
+*.so
+
# folders generated by setup.py
build
dist
+app_uploaded_files/
/logai/data/
/logai/.ipynb_checkpoints/
/results/
@@ -32,10 +35,206 @@ dist
/logai/tutorials/datasets/
/.DS_Store
/gui/.DS_Store
-/.idea
+# Distribution / packaging
+.venv
+venv
+.DS_Store
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
-docs/.DS_Store
-/docs/_build/
-/.pytest_cache/
-/logai/.idea/
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py.cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# UV
+# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+#uv.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+#poetry.toml
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
+# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
+#pdm.lock
+#pdm.toml
+.pdm-python
+.pdm-build/
+
+# pixi
+# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
+#pixi.lock
+# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
+# in the .venv directory. It is recommended not to include this directory in version control.
+.pixi
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.envrc
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+# Abstra
+# Abstra is an AI-powered process automation framework.
+# Ignore directories containing user credentials, local state, and settings.
+# Learn more at https://abstra.io/docs
+.abstra/
+
+# Visual Studio Code
+# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
+# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
+# and can be added to the global gitignore or merged into this file. However, if you prefer,
+# you could uncomment the following to ignore the entire vscode folder
+# .vscode/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
+
+# Cursor
+# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to
+# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data
+# refer to https://docs.cursor.com/context/ignore-files
+.cursorignore
+.cursorindexingignore
+
+# Marimo
+marimo/_static/
+marimo/_lsp/
+__marimo__/
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
deleted file mode 100644
index 8287f73..0000000
--- a/CODE_OF_CONDUCT.md
+++ /dev/null
@@ -1,112 +0,0 @@
-
-# Salesforce Open Source Community Code of Conduct
-
-## About the Code of Conduct
-
-Equality is a core value at Salesforce. We believe a diverse and inclusive
-community fosters innovation and creativity, and are committed to building a
-culture where everyone feels included.
-
-Salesforce open-source projects are committed to providing a friendly, safe, and
-welcoming environment for all, regardless of gender identity and expression,
-sexual orientation, disability, physical appearance, body size, ethnicity, nationality,
-race, age, religion, level of experience, education, socioeconomic status, or
-other similar personal characteristics.
-
-The goal of this code of conduct is to specify a baseline standard of behavior so
-that people with different social values and communication styles can work
-together effectively, productively, and respectfully in our open source community.
-It also establishes a mechanism for reporting issues and resolving conflicts.
-
-All questions and reports of abusive, harassing, or otherwise unacceptable behavior
-in a Salesforce open-source project may be reported by contacting the Salesforce
-Open Source Conduct Committee at ossconduct@salesforce.com.
-
-## Our Pledge
-
-In the interest of fostering an open and welcoming environment, we as
-contributors and maintainers pledge to making participation in our project and
-our community a harassment-free experience for everyone, regardless of gender
-identity and expression, sexual orientation, disability, physical appearance,
-body size, ethnicity, nationality, race, age, religion, level of experience, education,
-socioeconomic status, or other similar personal characteristics.
-
-## Our Standards
-
-Examples of behavior that contributes to creating a positive environment
-include:
-
-* Using welcoming and inclusive language
-* Being respectful of differing viewpoints and experiences
-* Gracefully accepting constructive criticism
-* Focusing on what is best for the community
-* Showing empathy toward other community members
-
-Examples of unacceptable behavior by participants include:
-
-* The use of sexualized language or imagery and unwelcome sexual attention or
-advances
-* Personal attacks, insulting/derogatory comments, or trolling
-* Public or private harassment
-* Publishing, or threatening to publish, others' private information—such as
-a physical or electronic address—without explicit permission
-* Other conduct which could reasonably be considered inappropriate in a
-professional setting
-* Advocating for or encouraging any of the above behaviors
-
-## Our Responsibilities
-
-Project maintainers are responsible for clarifying the standards of acceptable
-behavior and are expected to take appropriate and fair corrective action in
-response to any instances of unacceptable behavior.
-
-Project maintainers have the right and responsibility to remove, edit, or
-reject comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned with this Code of Conduct, or to ban temporarily or
-permanently any contributor for other behaviors that they deem inappropriate,
-threatening, offensive, or harmful.
-
-## Scope
-
-This Code of Conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community. Examples of
-representing a project or community include using an official project email
-address, posting via an official social media account, or acting as an appointed
-representative at an online or offline event. Representation of a project may be
-further defined and clarified by project maintainers.
-
-## Enforcement
-
-Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the Salesforce Open Source Conduct Committee
-at ossconduct@salesforce.com. All complaints will be reviewed and investigated
-and will result in a response that is deemed necessary and appropriate to the
-circumstances. The committee is obligated to maintain confidentiality with
-regard to the reporter of an incident. Further details of specific enforcement
-policies may be posted separately.
-
-Project maintainers who do not follow or enforce the Code of Conduct in good
-faith may face temporary or permanent repercussions as determined by other
-members of the project's leadership and the Salesforce Open Source Conduct
-Committee.
-
-## Attribution
-
-This Code of Conduct is adapted from the [Contributor Covenant][contributor-covenant-home],
-version 1.4, available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html.
-It includes adaptions and additions from [Go Community Code of Conduct][golang-coc],
-[CNCF Code of Conduct][cncf-coc], and [Microsoft Open Source Code of Conduct][microsoft-coc].
-
-This Code of Conduct is licensed under the [Creative Commons Attribution 3.0 License][cc-by-3-us].
-
-[contributor-covenant-home]: https://www.contributor-covenant.org (https://www.contributor-covenant.org/)
-[golang-coc]: https://golang.org/conduct
-[cncf-coc]: https://github.com/cncf/foundation/blob/master/code-of-conduct.md
-[microsoft-coc]: https://opensource.microsoft.com/codeofconduct/
-[cc-by-3-us]: https://creativecommons.org/licenses/by/3.0/us/
\ No newline at end of file
diff --git a/LICENSE b/LICENSE
new file mode 100644
index 0000000..4bb780d
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,28 @@
+BSD 3-Clause License
+
+Copyright (c) 2025, Arumugam
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions of source code must retain the above copyright notice, this
+ list of conditions and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the above copyright notice,
+ this list of conditions and the following disclaimer in the documentation
+ and/or other materials provided with the distribution.
+
+3. Neither the name of the copyright holder nor the names of its
+ contributors may be used to endorse or promote products derived from
+ this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
+FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/MANIFEST.in b/MANIFEST.in
index 1f5df01..64ad321 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,6 +1 @@
-recursive-include logai *.yaml *.json
-
-recursive-exclude logai/results *
-recursive-exclude logai/data/ *
-
-include requirements.txt
+include README.md LICENSE
diff --git a/README.md b/README.md
index 5eced54..e6415a1 100644
--- a/README.md
+++ b/README.md
@@ -1,10 +1,6 @@
-
@@ -12,295 +8,76 @@ For full license text, see the LICENSE file in the repo root or https://opensour
-
-
-# LogAI: A Library for Log Analytics and Intelligence
-
## Table of Contents
-* [Introduction](#introduction)
-* [Installation](#installation)
-* [Getting Started](#getting-started)
- * [Explore LogAI GUI Portal](#explore-logai-gui-portal)
- * [Run Simple Time-series Anomaly Detection Application](#run-simple-time-series-anomaly-detection-application)
- * [Build Customized LogAI Applications](#build-customized-logai-applications)
- * [Deep-learning Anomaly Detection Benchmarking](#deep-learning-anomaly-detection-benchmarking)
-* [Documentation](#documentation)
-* [Technical Report and Citing LogAI](#technical-report-and-citing-logai)
-* [Contact](#contact)
-* [License](#license)
+- [RDK\_One3B](#rdk_one3b)
+ - [Table of Contents](#table-of-contents)
+ - [Introduction](#introduction)
+- [Getting Started](#getting-started)
+ - [Installation](#installation)
+ - [Explore RDK\_One3B GUI Portal](#explore-rdk_one3b-gui-portal)
+ - [Log Summarization](#log-summarization)
+ - [Log Parsing](#log-parsing)
+ - [Log Clustering](#log-clustering)
+ - [Log Anomaly Detection](#log-anomaly-detection)
+ - [Log Report with LLaMa](#log-report-with-llama)
+ - [Reference](#reference)
+ - [License](#license)
## Introduction
-LogAI is a one-stop open source library for log analytics and intelligence. LogAI supports various log analytics and log intelligence tasks such as log summarization, log clustering, log anomaly detection and more. It adopts the OpenTelemetry data model, to enable compatibility with different log management platforms. LogAI provides a unified model interface and integrates popular time-series, statistical learning and deep
-learning models. Alongside this, LogAI also provides an out-of-the-box GUI toolkit for users to conduct interactive log
-analysis. With LogAI, we can also easily benchmark popular ML and deep-learning algorithms for log anomaly detection
-without putting in redundant effort to process the logs. We have opensourced LogAI to facilitate a wide range of
-applications benefiting both academic research and industrial prototyping.
-
-The following table compares LogAI with several existing log analysis Tools, include both commercial log management platforms
-like NewRelic and DataDog, or popular log analysis open source tools on Github.
-
-| Coverage | LogAI | NewRelic Log Monitoring | DataDog Log Explorer | logparser | loglizer | deep-loglizer | log3C |
-| ------------- | ------------- | ------------- | ------------- | ------------- | ------------- | ------------- | ------------- |
-| OpenTelemetry log data model | :white_check_mark: | :white_check_mark: | :white_check_mark: | | | | | |
-| Unified data loader and preprocessing | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: | |
-| Auto log parsing | :white_check_mark: | :white_check_mark: | :white_check_mark: | | |
-| Log clustering | :white_check_mark: | :white_check_mark: | :white_check_mark: | | | | :white_check_mark: |
-| Log anomaly detection - time-series | :white_check_mark: | :white_check_mark: | :white_check_mark: | | | | | |
-| Log anomaly detection - traditional ML | :white_check_mark: | | | | :white_check_mark: | | |
-| Log anomaly detection - deep Learning | :white_check_mark: | | | | :white_check_mark: | :white_check_mark: | |
-| Huggingface integration | :white_check_mark: | | | | | | |
-| GUI for result visualization | :white_check_mark: | :white_check_mark: | :white_check_mark: | | | | |
+# Getting Started
## Installation
-### Quick Install
-You can install LogAI core library using `pip install`:
-
```shell
-# Check out LogAI code repo from Github
-git clone https://github.com/salesforce/logai.git
-cd logai
+# Check out rdk_one3b code repo from Github
+git clone https://git.yo-digital.com/hackfest6/rdk-one3b.git
+cd rdk-one3b
# [Optional] Create virtual environment
python3 -m venv venv
source venv/bin/activate
-# Install LogAI
-pip install logai
-
-```
-
-### Install Optional Dependencies
-LogAI core library is light-weight with limited dependent packages installed. Users can install optional dependencies
-to enable extended functionalities of LogAI.
-
-**Deep Learning Log Analysis**. To conduct deep learning model related tasks and run benchmarking,
-please install extra requirements by `pip install "logai[deep-learning]"`.
-
-**Enable LogAI GUI Portal***. To use LogAI GUI portal,
-please install extra requirements by `pip install "logai[gui]"`.
-
-**LogAI Development**. To contribute to LogAI development, build and test code changes,
-please install extra requirements by `pip install "logai[dev]"`.
-
-**Complete Installation**. you can install the full list of dependencies by `pip install "logai[all]"`.
-
-### Known Issues
-
-> :warning: You may see `Resource punkt not found` while using LogAI. You can download `punkt`
-> package from NLTK to solve the problem.
-> ```shell
-> python -m nltk.downloader punkt
-> ```
-
-## Getting Started
-
-Below we briefly introduce several ways to explore and use LogAI, including exploring LogAI GUI
-portal, benchmarking deep-learning based log anomaly detection using LogAI, and building your
-own log analysis application with LogAI.
+# install dependencies
+pip install -r requirements.txt
+pip install nltk
+python -m nltk.downloader punkt_tab
-### Explore LogAI GUI Portal
+# make sure to add current root to PYTHONPATH
+export PYTHONPATH='.'
+python3 logai/gui/application.py # Run local plotly dash server.
-You can also start a local LogAI service and use the GUI portal to explore LogAI.
-
-```shell
-
-# Check out LogAI code repo from Github
-git clone https://github.com/salesforce/logai.git
-cd logai
-
-# [Optional] Create virtual environment
-python3 -m venv venv # create virtual environment
-source venv/bin/activate # activate virtual env
-
-# install LogAI and GUI dependencies
-pip install ".[dev]"
-pip install ".[gui]"
-
-# Start LogAI service
-export PYTHONPATH='.' # make sure to add current root to PYTHONPATH
-python3 gui/application.py # Run local plotly dash server.
```
-Then open the LogAI portal via http://localhost:8050/ or http://127.0.0.1:8050/ in your browser:
-
-
-
-The control panel is on the left side of the page. There are three applications you can choose from:
-Log Summarization, Log Clustering and Anomaly Detection.
-
-#### Control Panel
-
-**File Setting**. You can select the log type and log files to be processed. Now LogAI supports three
-public datasets: HDFS, BGL and HealthApp. For each log type we included several sample log data. After
-log file selected, you can choose the attributes you want to be involved in log processing. The selected
-attributes will be treated as structured log attributes.
-
-**Algorithm Setting**. For different applications, the algorithms options may be different.
-For example, auto-log parsing algorithms are utilized for log summarization, but log clustering uses auto-parsing algorithms,
- vectorization algorithms, categorical encoding and clustering algorithms. You can select an
- algorithm and change the parameters in each algorithm section. After algorithm configuration, simply click "run" to
-run the application.
-
-#### Log Summarization
-
-Log summarization App summarize and group the raw logs by log patterns and attributes. You can click on
-each log pattern and see what the pattern looks like and the dynamic values in each position. You can also
-see the chart of occurrance trend of this pattern on the right side.
-
-
+Then open the RDK_One3B App via http://localhost:8050/ or http://127.0.0.1:8050/ in your browser.
-#### Log Clustering
+## Explore RDK_One3B GUI Portal
-Log clustering App groups raw logs into clusters by calculating the semantic representation of each logline.
-Then using clustering algorithms to generate log clusters. In this example, we choose k-mean where `k==8` to
-generate 8 clusters. The result is shown as a pie chart and you can click each portion of the pie chart to check
-the raw logs in this cluster.
+### Log Summarization
+We use the various values obtained through telemetry, such as `SSID.Status`, `Radio.Status`, etc., to summarize the state the device has been in over the last few hours, based on the logs obtained.
+
-
+### Log Parsing
+**Drain3** uses a tree-based algorithm to extract structured **log templates** from raw log messages. This reduces high-cardinality logs into generalized patterns. After parsing, the logs are transformed into a feature-rich format using template frequency, token-level statistics, temporal features and metadata (e.g., source, severity, etc.). This prepares the data for classification and clustering algorithms.
-#### Anomaly Detection
+We parse the various logs present under rdklogs with the help of drain3 to see which logs have been occurring most frequently in the last few hours on the device. This is done on a single-file basis here.
+
-Log anomaly detection App conduct log anomaly detection tasks. Similar to log clustering, log anomaly detection
-also needs to extract information from raw logs and generate representation of loglines. Depend on the type of anomaly detection,
-The representation can be different.
-**Time-series anomaly detection**. If we use time-series algorithm like ETS, the raw logs will be converted
-into log counter vectors by given time interval. Then ETS is performed on the generated log counter vectors and detect
-anomalous timestamps on the counter vector time-series.
-
-**Semantic anomaly detection**. If we use unsupervised outlier detection algorithms such as One-class SVM, the raw logs will
-be converted into semantic vectors and feed the One-class SVM model. Then the model will detect anomalous loglines.
-
-
-
-LogAI GUI portal is just an example to demo LogAI capabilities. We know this may not be the best way to visualize the
-results and there might be bugs in how the results are displayed. We will keep working with the open source community
-to improve usability of the portal. Any feedbacks and contributions are welcome :blush:.
-
-### Run Simple Time-series Anomaly Detection Application
-
-You can also use LogAI in more programtic ways. LogAI supports configuration files in `.json` or `.yaml`.
-Below is a sample `log_anomaly_detection_config.json` configuration for anomaly detection application.
-Make sure to set `filepath` to the target log dataset file path.
-
-```json
-{
- "open_set_data_loader_config": {
- "dataset_name": "HDFS",
- "filepath": ""
- },
- "preprocessor_config": {
- "custom_delimiters_regex":[]
- },
- "log_parser_config": {
- "parsing_algorithm": "drain",
- "parsing_algo_params": {
- "sim_th": 0.5,
- "depth": 5
- }
- },
- "feature_extractor_config": {
- "group_by_category": ["Level"],
- "group_by_time": "1s"
- },
- "log_vectorizer_config": {
- "algo_name": "word2vec"
- },
- "categorical_encoder_config": {
- "name": "label_encoder"
- },
- "anomaly_detection_config": {
- "algo_name": "one_class_svm"
- }
- }
-```
-
-Then to run log anomaly detection. You can simply create below python script:
-
-```python
-import json
-
-from logai.applications.application_interfaces import WorkFlowConfig
-from logai.applications.log_anomaly_detection import LogAnomalyDetection
-
-# path to json configuration file
-json_config = "./log_anomaly_detection_config.json"
-
-# Create log anomaly detection application workflow configuration
-config = json.loads(json_config)
-workflow_config = WorkFlowConfig.from_dict(config)
-
-# Create LogAnomalyDetection Application for given workflow_config
-app = LogAnomalyDetection(workflow_config)
-
-# Execute App
-app.execute()
-
-```
-
-Then you can check anomaly detection results by calling `app.anomaly_results`.
-
-For full context of this example please check
-[Tutorial: Use Log Anomaly Detection Application](./examples/jupyter_notebook/log_anomaly_detection_example.ipynb).
-
-### Build Customized LogAI Applications
-You can build your own customized log analysis applications using LogAI. Here we show two examples:
-
-* [Tutorial: Log Clustering Using LogAI](./examples/jupyter_notebook/tutorial_log_clustering.ipynb)
-* [Tutorial: Log Anomaly Detection Using LogAI](./examples/jupyter_notebook/tutorial_log_anomaly_detection.ipynb)
-
-### Deep-learning Anomaly Detection Benchmarking
-
-LogAI can be used to benchmark deep-learning anomaly detection results.
-A [tutorial](examples/jupyter_notebook/tutorial_deep_ad.md) is provided for
-Anomaly Detection Benchmarking using LSTM anomaly detector for HDFS Dataset. More examples of deep-learning anomaly
-detection benchmarking on different datasets and algorithms can be found in
-[Deep Anomaly Detection Benchmarking Examples](examples/jupyter_notebook/nn_ad_benchmarking).
-
-## Documentation
-
-For more detail about LogAI library and advanced use cases, please visit
-[LogAI Documentation](https://opensource.salesforce.com/logai).
+### Log Clustering
+**TensorFlow Decision Forests** (TF-DF) is used to train a supervised model on labeled logs. It's a scalable, interpretable tree-based model ideal for classifying logs into categories like `INFO`, `ERROR`, `SECURITY_ALERT` and detecting known failure types or operational events.
+
+We form clusters for the various logs present under rdklogs with the help of drain3 to see which log patterns have been occurring most frequently in the last few hours on the device. This is done on both a single-file and a multi-file basis here.
+
-## Technical Report and Citing LogAI
+### Log Anomaly Detection
+After parsing, the logs are transformed into a feature-rich format using template frequency, token-level statistics, temporal features and metadata (e.g., source, severity, etc.). This prepares the data for classification and clustering algorithms.
-You can find more details about LogAI in the [technical report](https://arxiv.org/abs/2301.13415).
-If you're using LogAI in your research or applications, please cite using this BibTeX:
+
-```
-@misc{https://doi.org/10.48550/arxiv.2301.13415,
- title = {LogAI: A Library for Log Analytics and Intelligence},
- author = {Cheng, Qian and Saha, Amrita and Yang, Wenzhuo and Liu, Chenghao and Sahoo, Doyen and Hoi, Steven},
- publisher = {arXiv},
- year = {2023},
- doi = {10.48550/ARXIV.2301.13415},
- url = {https://arxiv.org/abs/2301.13415},
- copyright = {arXiv.org perpetual, non-exclusive license}
-}
-
-```
-
-## Contact
-If you have any questions, comments or suggestions,
-please do not hesitate to contact us at [logai@salesforce.com](logai@salesforce.com).
+### Log Report with LLaMa
+Using LLaMA (a large language model), the pipeline generates natural language **summaries** of anomaly clusters, **explanations** of complex or unknown logs and AI-powered insights for DevOps or security teams.
+## Reference
+ [Salesforce LogAI](https://github.com/salesforce/logai) — A Library for Log Analytics and Intelligence.
## License
[BSD 3-Clause License](LICENSE.txt)
-
diff --git a/SECURITY.md b/SECURITY.md
deleted file mode 100644
index 8557797..0000000
--- a/SECURITY.md
+++ /dev/null
@@ -1,14 +0,0 @@
-
-## Security
-
-Please report any security issue to [security@salesforce.com](mailto:security@salesforce.com)
-as soon as it is discovered. This library limits its runtime dependencies in
-order to reduce the total cost of ownership as much as can be, but all consumers
-should remain vigilant and have their security stakeholders review all third-party
-products (3PP) like this one and their dependencies.
\ No newline at end of file
diff --git a/column_list.txt b/column_list.txt
new file mode 100644
index 0000000..e1ac034
--- /dev/null
+++ b/column_list.txt
@@ -0,0 +1,7 @@
+Report.Time
+Report.mac
+Report.Device.WiFi.SSID.1.Status
+Report.Device.WiFi.Radio.1.Status
+Report.Device.WiFi.SSID.2.Status
+Report.Device.WiFi.Radio.2.Status
+
diff --git a/configs/__init__.py b/configs/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/configs/airties.json b/configs/airties.json
new file mode 100644
index 0000000..5ee7199
--- /dev/null
+++ b/configs/airties.json
@@ -0,0 +1,39 @@
+{
+ "_comment":"2025-06-03 15:21:20-[00352190]-[con]multiap-controller[20554]:[ctrl][tlv_parser] map_parse_assoc_wifi6_sta_status_tlv: sta[d2:4a:89:04:00:00] not found",
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%d %H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Module", "Submodule"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":["-", "[", "]"],
+ "custom_replace_list": {}
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/boottime.json b/configs/boottime.json
new file mode 100644
index 0000000..e69de29
diff --git a/configs/ccspcujoagent_log.json b/configs/ccspcujoagent_log.json
new file mode 100644
index 0000000..ec94e9a
--- /dev/null
+++ b/configs/ccspcujoagent_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "CCSPCUJOAGENT\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/cellularmanager_log.json b/configs/cellularmanager_log.json
new file mode 100644
index 0000000..f40f326
--- /dev/null
+++ b/configs/cellularmanager_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "CELLULARMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/config_list.json b/configs/config_list.json
new file mode 100644
index 0000000..797c289
--- /dev/null
+++ b/configs/config_list.json
@@ -0,0 +1,184 @@
+{
+ "supported_files": [
+ {
+ "name": "Airties",
+ "supported_config": "airties.json",
+ "supported_files": ["ArmConsolelog"]
+ },
+ {
+ "name": "ArmConsole",
+ "supported_config": "console_log.json",
+ "supported_files": ["ArmConsolelog"]
+ },
+ {
+ "name": "BootTime",
+ "supported_config": "boottime.json",
+ "supported_files": ["BootTime"]
+ },
+ {
+ "name": "LMLite",
+ "supported_config": "lmlite.json",
+ "supported_files": ["LM"]
+ },
+ {
+ "name": "PandM_Log",
+ "supported_config": "pandm_log.json",
+ "supported_files": ["PAMlog"]
+ },
+ {
+ "name": "PSM_log",
+ "supported_config": "psm_log.json",
+ "supported_files": ["PSMlog"]
+ },
+ {
+ "name": "WEBPA",
+ "supported_config": "webpa.json",
+ "supported_files": ["WEBPAlog"]
+ },
+ {
+ "name": "WiFi_Log",
+ "supported_config": "wifi_log.json",
+ "supported_files": ["WiFiLog"]
+ },
+ {
+ "name": "Telemetry_2",
+ "supported_config": "telemetry2.json",
+ "supported_files": ["telemetry2_0"]
+ },
+ {
+ "name": "TelcoVoiceManager",
+ "supported_config": "telco_voice_mgr.json",
+ "supported_files": ["TELCOVOICEMANAGERLog"]
+ },
+ {
+ "name": "StateInfoManager",
+ "supported_config": "state_info_manager.json",
+ "supported_files": ["STATEINFOMANAGERLog"]
+ },
+ {
+ "name": "PARODUS",
+ "supported_config": "parodus.json",
+ "supported_files": ["PARODUSlog"]
+ },
+ {
+ "name": "Syslog_NG",
+ "supported_config": "syslog_ng.json",
+ "supported_files": ["syslog"]
+ },
+ {
+ "name": "CUJOAgent",
+ "supported_config": "cujo_agent.json",
+ "supported_files": ["CUJOAGENT"]
+ },
+ {
+ "name": "Messages",
+ "supported_config": "console_log.json",
+ "supported_files": ["messages"]
+ },
+ {
+ "name": "VLANManagerLog",
+ "supported_config": "vlanmanager_log.json",
+ "supported_files": ["VLANManagerLog"]
+ },
+ {
+ "name": "WEBPAlog",
+ "supported_config": "webpa_log.json",
+ "supported_files": ["WEBPAlog"]
+ },
+ {
+ "name": "CCSPCUJOAGENTlog",
+ "supported_config": "ccspcujoagent_log.json",
+ "supported_files": ["CCSPCUJOAGENTlog"]
+ },
+ {
+ "name": "CELLULARMANAGERlog",
+ "supported_config": "cellularmanager_log.json",
+ "supported_files": ["CELLULARMANAGERLog"]
+ },
+ {
+ "name": "EnrolmentAgentlog",
+ "supported_config": "enrollmentagent_log.json",
+ "supported_files": ["EnrolmentAgentLog"]
+ },
+ {
+ "name": "ETHAGENTlog",
+ "supported_config": "ethagent_log.json",
+ "supported_files": ["ETHAGENTLog"]
+ },
+ {
+ "name": "GponManagerlog",
+ "supported_config": "gponmanager_log.json",
+ "supported_files": ["GponManagerLog"]
+ },
+ {
+ "name": "IPv6Privacylog",
+ "supported_config": "ipv6privacy_log.json",
+ "supported_files": ["IPV6PrivacyLog"]
+ },
+ {
+ "name": "NOTIFYlog",
+ "supported_config": "notify_log.json",
+ "supported_files": ["NOTIFYLog"]
+ },
+ {
+ "name": "PPPManagerlog",
+ "supported_config": "pppmanager_log.json",
+ "supported_files": ["PPPManagerLog"]
+ },
+ {
+ "name": "RTMessagelog",
+ "supported_config": "rtmessage_log.json",
+ "supported_files": ["RTMessagelog"]
+ },
+ {
+ "name": "SmartHomeAgentLog",
+ "supported_config": "smarthomeagent_log.json",
+ "supported_files": ["SmartHomeAgentLog"]
+ },
+ {
+ "name": "TDMlog",
+ "supported_config": "tdm_log.json",
+ "supported_files": ["TDMlog"]
+ },
+ {
+ "name": "TELCOVOICEMANAGERLog",
+ "supported_config": "telecovoice_log.json",
+ "supported_files": ["TELCOVOICEMANAGERLog"]
+ },
+ {
+ "name": "VLANManagerLog",
+ "supported_config": "vlanmanager_log.json",
+ "supported_files": ["VLANManagerLog"]
+ },
+ {
+ "name": "VPNManagerLog",
+ "supported_config": "vpnmanager_log.json",
+ "supported_files": ["VPNManagerLog"]
+ },
+ {
+ "name": "WANMANAGERLog",
+ "supported_config": "wanmanager_log.json",
+ "supported_files": ["WANMANAGERLog"]
+ },
+ {
+ "name": "WEBPAlog",
+ "supported_config": "webpa_log.json",
+ "supported_files": ["WEBPAlog"]
+ },
+ {
+ "name": "xconf",
+ "supported_config": "xconf_log.json",
+ "supported_files": ["xconf"]
+ },
+ {
+ "name": "XDSLMANAGERLog",
+ "supported_config": "xdslmanager_log.json",
+ "supported_files": ["XDSLMANAGERLog"]
+ },
+ {
+ "name": "VoIPlog",
+ "supported_config": "voip_log.json",
+ "supported_files": ["VoIPlog"]
+ }
+ ]
+}
diff --git a/configs/console_log.json b/configs/console_log.json
new file mode 100644
index 0000000..e69de29
diff --git a/configs/cujo_agent.json b/configs/cujo_agent.json
new file mode 100644
index 0000000..a2d7ac1
--- /dev/null
+++ b/configs/cujo_agent.json
@@ -0,0 +1,40 @@
+
+{
+  "_comment": "2025-06-03T15:21:41 telekom: CUJOAGENT [tracer] INFO trace [timer=tracer cpu%=2.6444200866088559 vsize_kb=146988 rss_kb=22128 lua_mem_pre_gc_kb=1095.74609375 lua_mem_kb=1077.017578125 lua_refs=53 uptime_s=74700]",
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Module", "Level1", "Level2"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":["="],
+  "custom_replace_list": {}
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/enrollmentagent_log.json b/configs/enrollmentagent_log.json
new file mode 100644
index 0000000..260f443
--- /dev/null
+++ b/configs/enrollmentagent_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "ENROLMENTAGENT\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/ethagent_log.json b/configs/ethagent_log.json
new file mode 100644
index 0000000..830a50d
--- /dev/null
+++ b/configs/ethagent_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "ETHAGENT\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/gponmanager_log.json b/configs/gponmanager_log.json
new file mode 100644
index 0000000..4783041
--- /dev/null
+++ b/configs/gponmanager_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "GPONMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/ipv6privacy_log.json b/configs/ipv6privacy_log.json
new file mode 100644
index 0000000..0203908
--- /dev/null
+++ b/configs/ipv6privacy_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "IPV6PRIVACY\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/lmlite.json b/configs/lmlite.json
new file mode 100644
index 0000000..823600c
--- /dev/null
+++ b/configs/lmlite.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "LM\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/notify_log.json b/configs/notify_log.json
new file mode 100644
index 0000000..0c5a1d7
--- /dev/null
+++ b/configs/notify_log.json
@@ -0,0 +1,34 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+  "NOTIFY\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ }
+}
diff --git a/configs/pandm_log.json b/configs/pandm_log.json
new file mode 100644
index 0000000..8b0acf7
--- /dev/null
+++ b/configs/pandm_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "PAM\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/parodus.json b/configs/parodus.json
new file mode 100644
index 0000000..18cc82f
--- /dev/null
+++ b/configs/parodus.json
@@ -0,0 +1,43 @@
+{
+ "_comment":"2025-06-03T15:21:49 speedport: PARODUS.INFO [tid=12672] PARODUS: Ping received with payload mac:34194dc2f81f, opcode 9",
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "PARODUS\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/pppmanager_log.json b/configs/pppmanager_log.json
new file mode 100644
index 0000000..a485039
--- /dev/null
+++ b/configs/pppmanager_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "PPPMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/psm_log.json b/configs/psm_log.json
new file mode 100644
index 0000000..cfe8de1
--- /dev/null
+++ b/configs/psm_log.json
@@ -0,0 +1,43 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "PSM\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/rtmessage_log.json b/configs/rtmessage_log.json
new file mode 100644
index 0000000..8f5c0ed
--- /dev/null
+++ b/configs/rtmessage_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "RTMESSAGE\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/smarthomeagent_log.json b/configs/smarthomeagent_log.json
new file mode 100644
index 0000000..353e151
--- /dev/null
+++ b/configs/smarthomeagent_log.json
@@ -0,0 +1,43 @@
+{
+
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "CCSPSMARTHOMEAGENT\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/state_info_manager.json b/configs/state_info_manager.json
new file mode 100644
index 0000000..9e3e402
--- /dev/null
+++ b/configs/state_info_manager.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "STATEINFOMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/syslog_ng.json b/configs/syslog_ng.json
new file mode 100644
index 0000000..7ee32be
--- /dev/null
+++ b/configs/syslog_ng.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
+
diff --git a/configs/tdm_log.json b/configs/tdm_log.json
new file mode 100644
index 0000000..cc2200d
--- /dev/null
+++ b/configs/tdm_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "TDM\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/telco_voice_mgr.json b/configs/telco_voice_mgr.json
new file mode 100644
index 0000000..413aa87
--- /dev/null
+++ b/configs/telco_voice_mgr.json
@@ -0,0 +1,43 @@
+{
+ "_comment":"2025-06-03T15:30:43 speedport: TELCOVOICEMANAGER.INFO [tid=2672] get_event_param:1144 Event name = voiceServiceUpdateObject",
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "TELCOVOICEMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/telecovoice_log.json b/configs/telecovoice_log.json
new file mode 100644
index 0000000..a0b35ef
--- /dev/null
+++ b/configs/telecovoice_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+  "TELCOVOICEMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/telemetry2.json b/configs/telemetry2.json
new file mode 100644
index 0000000..89a3692
--- /dev/null
+++ b/configs/telemetry2.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "T2\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/test.json b/configs/test.json
new file mode 100644
index 0000000..525d2a9
--- /dev/null
+++ b/configs/test.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "XDSLMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/version.json b/configs/version.json
new file mode 100644
index 0000000..81f1d80
--- /dev/null
+++ b/configs/version.json
@@ -0,0 +1,50 @@
+{
+  "_comment": [
+    "MACHINE_NAME=HGW01A-ARC",
+    "imagename:HGW01A-ARC_v004.011.070#2025:05:16",
+    "BRANCH=rdkb-2022q3-dunfell-dt",
+    "VERSION=004.011.070",
+    "SPIN=070",
+    "BUILD-TIME=\"2025-05-16 14:34:44\"",
+    "WORKFLOW-TYPE=REL-RDK",
+    "BUILD-ID=43962133",
+    "Generated on Fri May 16 14:34:44 UTC 2025",
+    "SDK_VERSION=ecnt--sdk-7.3.283.3100_v187"
+  ],
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%d %H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Module", "Submodule"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":["-"],
+ "custom_replace_list": {}
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/vlanmanager_log.json b/configs/vlanmanager_log.json
new file mode 100644
index 0000000..84fc93d
--- /dev/null
+++ b/configs/vlanmanager_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "VLANMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/voip_log.json b/configs/voip_log.json
new file mode 100644
index 0000000..d3fc7c7
--- /dev/null
+++ b/configs/voip_log.json
@@ -0,0 +1,41 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "VOIP\\.([\\w]+)": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/vpnmanager_log.json b/configs/vpnmanager_log.json
new file mode 100644
index 0000000..963ec1b
--- /dev/null
+++ b/configs/vpnmanager_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "VPNMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/wanmanager_log.json b/configs/wanmanager_log.json
new file mode 100644
index 0000000..7511896
--- /dev/null
+++ b/configs/wanmanager_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "WANMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/webpa.json b/configs/webpa.json
new file mode 100644
index 0000000..ea40e67
--- /dev/null
+++ b/configs/webpa.json
@@ -0,0 +1,43 @@
+{
+ "_comment":"2025-06-03T18:03:12 speedport: WEBPA.INFO [tid=12808] WEBPA: Received data ParamName X_RDKCENTRAL-COM_Connected-Client,data : Connected-Client,Other,40:61:86:e9:ce:51,Connected,PCBuero",
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex":[],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "WEBPA\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
\ No newline at end of file
diff --git a/configs/webpa_log.json b/configs/webpa_log.json
new file mode 100644
index 0000000..5f1090f
--- /dev/null
+++ b/configs/webpa_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex": [],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "WEBPA\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/wifi_log.json b/configs/wifi_log.json
new file mode 100644
index 0000000..14c495f
--- /dev/null
+++ b/configs/wifi_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex": [],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "WIFI\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/xDSL_mgr.json b/configs/xDSL_mgr.json
new file mode 100644
index 0000000..525d2a9
--- /dev/null
+++ b/configs/xDSL_mgr.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex": [],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "XDSLMANAGER\\.([\\w]+)": "",
+ "tid=\\d+\\]\\s*(.*)$": ""
+ }
+ },
+ "log_parser_config": {
+ "parsing_algorithm": "drain",
+ "parsing_algo_params": {
+ "sim_th": 0.5,
+ "depth": 5
+ }
+ },
+ "feature_extractor_config": {
+ "group_by_category": ["Level"],
+ "group_by_time": "1min"
+ },
+ "log_vectorizer_config": {
+ "algo_name": "word2vec"
+ },
+ "categorical_encoder_config": {
+ "name": "label_encoder"
+ },
+ "anomaly_detection_config": {
+ "algo_name": "one_class_svm"
+ }
+}
diff --git a/configs/xconf_log.json b/configs/xconf_log.json
new file mode 100644
index 0000000..559e061
--- /dev/null
+++ b/configs/xconf_log.json
@@ -0,0 +1,42 @@
+{
+ "data_loader_config": {
+ "infer_datetime": true,
+ "datetime_format": "%Y-%m-%dT%H:%M:%S",
+ "reader_args": {
+ "log_format": " "
+ },
+ "dimensions": {
+ "timestamp": ["DateTime"],
+ "body": ["Content"],
+ "attributes": ["Level"]
+ }
+ },
+ "preprocessor_config": {
+ "custom_delimiters_regex": [],
+ "custom_replace_list": {
+ "^([\\d\\-\\d\\-\\w:\\d:\\d]+)": "",
+ "XCONF\\.([\\w]+)": "