diff --git a/.gitignore b/.gitignore
index 3f374b125..5fc30282a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -22,11 +22,15 @@ build/
.env.local
.env.*.local
src/.env
-*.pem
+src/publishers/crypto
+!src/publishers/crypto/README.md
*.key
*.log
*.crt
*.asc
+*.pub
+*.pem
+*.p12
local/
# settings of editors
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 15567a065..8b4b4cc3d 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,18 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
## [v23.09.1] - 2023-09-27
+### Breaking change! (Only for docker users with customized or new report templates)
+
+**Background:**
+Presenter docker maps `/app/templates` directory to `presenters_templates` container.
+This overrides the original `/app/templates` files with user modifications.
+When new or updated templates arrive with a new version, they stay hidden due to this mapping.
+
+**Solution:**
+Remap user changes to `/app/templates/user_templates` directory.
+You then only need to update the old template path in `Configuration / Product Types`:
+e.g. `/app/templates/file.html` -> `/app/templates/user_templates/file.html`
+
### Added
* New reports (OSINT, Disinfo, Offensive, Weekly)
* Keycloak authentication support
@@ -31,6 +43,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/).
* Fixed bots crash, better Regex
* Added missing TOR binary to the collectors
* Improved templates
+* Fixed bug where new templates stayed hidden due to wrong docker mapping
* A lot of various fixes
Thanks for the contributions: @sebix, @multiflexi
diff --git a/docker/Dockerfile.bots b/docker/Dockerfile.bots
index fc2ec693c..a989a105c 100644
--- a/docker/Dockerfile.bots
+++ b/docker/Dockerfile.bots
@@ -1,4 +1,4 @@
-FROM python:3.7-alpine3.14 AS build_shared
+FROM python:3.12-alpine3.18 AS build_shared
WORKDIR /build_shared/
@@ -8,7 +8,7 @@ RUN python -m build
-FROM python:3.7-alpine3.14 AS production
+FROM python:3.12-alpine3.18 AS production
WORKDIR /app/
diff --git a/docker/Dockerfile.collectors b/docker/Dockerfile.collectors
index a77621c42..edc46df14 100644
--- a/docker/Dockerfile.collectors
+++ b/docker/Dockerfile.collectors
@@ -1,4 +1,4 @@
-FROM python:3.7-alpine3.14 AS build_shared
+FROM python:3.12-alpine3.18 AS build_shared
WORKDIR /build_shared/
@@ -8,7 +8,7 @@ RUN python -m build
-FROM python:3.7-alpine3.14 AS production
+FROM python:3.12-alpine3.18 AS production
WORKDIR /app/
diff --git a/docker/Dockerfile.presenters b/docker/Dockerfile.presenters
index 8f1b61026..27737392e 100644
--- a/docker/Dockerfile.presenters
+++ b/docker/Dockerfile.presenters
@@ -1,4 +1,4 @@
-FROM python:3.7-alpine3.14 AS build_shared
+FROM python:3.12-alpine3.18 AS build_shared
WORKDIR /build_shared/
@@ -8,7 +8,7 @@ RUN python -m build
-FROM python:3.7-alpine3.14 AS production
+FROM python:3.12-alpine3.18 AS production
WORKDIR /app/
@@ -20,59 +20,15 @@ RUN \
apk add --no-cache \
libpng \
libjpeg \
- wkhtmltopdf
+ py3-gobject3 \
+ pango
# install fonts
RUN \
apk add --no-cache \
msttcorefonts-installer \
fontconfig \
- font-noto \
- font-noto-adlam \
- font-noto-adlamunjoined \
- font-noto-arabic \
- font-noto-armenian \
- font-noto-avestan \
- font-noto-bamum \
- font-noto-bengali \
- font-noto-buhid \
- font-noto-carian \
- font-noto-chakma \
- font-noto-cherokee \
- font-noto-cypriot \
- font-noto-deseret \
- font-noto-devanagari \
- font-noto-ethiopic \
- font-noto-extra \
- font-noto-georgian \
- font-noto-glagolitic \
- font-noto-gothic \
- font-noto-gujarati \
- font-noto-gurmukhi \
- font-noto-hebrew \
- font-noto-kannada \
- font-noto-kayahli \
- font-noto-khmer \
- font-noto-lao \
- font-noto-lisu \
- font-noto-malayalam \
- font-noto-mandaic \
- font-noto-myanmar \
- font-noto-nko \
- font-noto-olchiki \
- font-noto-oldturkic \
- font-noto-oriya \
- font-noto-osage \
- font-noto-osmanya \
- font-noto-shavian \
- font-noto-sinhala \
- font-noto-tamil \
- font-noto-telugu \
- font-noto-thaana \
- font-noto-thai \
- font-noto-tibetan \
- font-noto-tifinagh \
- font-noto-vai \
+ font-noto-all \
terminus-font \
ttf-opensans \
font-bakoma \
diff --git a/docker/Dockerfile.publishers b/docker/Dockerfile.publishers
index 5ee5a7ead..c4f50e78c 100644
--- a/docker/Dockerfile.publishers
+++ b/docker/Dockerfile.publishers
@@ -1,4 +1,4 @@
-FROM python:3.9-alpine3.17 AS build_shared
+FROM python:3.12-alpine3.18 AS build_shared
WORKDIR /build_shared/
@@ -8,7 +8,7 @@ RUN python -m build
-FROM python:3.9-alpine3.17 AS production
+FROM python:3.12-alpine3.18 AS production
WORKDIR /app/
@@ -25,9 +25,9 @@ RUN pip install --no-cache-dir ./custom_packages/taranis_ng_shared-*.whl && rm -
COPY ./src/publishers/requirements.txt /app/requirements.txt
RUN apk add --no-cache \
- swig\
+ swig \
libmagic \
- gnupg
+ gnupg
RUN \
apk add --no-cache --virtual .build-deps build-base \
diff --git a/docker/README.md b/docker/README.md
index 571783ca0..f03ff8add 100644
--- a/docker/README.md
+++ b/docker/README.md
@@ -37,13 +37,17 @@ entrypoint, and the [gunicorn](https://gunicorn.org/) configuration file.
## Prerequisites
- [Docker](https://docs.docker.com/engine/install/)
-- [docker-compose](https://docs.docker.com/compose/install/) >= 1.27.0
+- [docker-compose](https://docs.docker.com/compose/install/) >= 1.27.0 (note: Compose V1 was deprecated in July 2023)
+
+or
+
+- [Compose V2](https://docs.docker.com/compose/migrate/), which is part of standard Docker Engine installation
- (Optional) [Vim](https://www.vim.org/) or other text editor - for configuration and development
Please note it is important to use the abovementioned version of
`docker-compose` or newer, otherwise the build and deploy will fail.
-## Quickly build and run Taranis NG using `docker-compose`
+## Quickly build and run Taranis NG using `docker-compose` or `docker compose`
_First_, you need to clone the source code repository:
@@ -66,12 +70,21 @@ _Finally_, either deploy the ready-made images from Docker hub with:
docker-compose -f docker/docker-compose.yml pull
docker-compose -f docker/docker-compose.yml up --no-build
```
+or
+```bash
+docker compose -f docker/docker-compose.yml pull
+docker compose -f docker/docker-compose.yml up --no-build
+```
or, alternatively, build and run the containers with:
```bash
TARANIS_NG_TAG=build docker-compose -f docker/docker-compose.yml up --build --pull
```
+or
+```bash
+TARANIS_NG_TAG=build docker compose -f docker/docker-compose.yml up --build --pull
+```
(`--pull` updates the base images)
**Voila, Taranis NG is up and running. Visit your instance by navigating to
diff --git a/docker/docker-compose-keycloak.yml b/docker/docker-compose-keycloak.yml
index d8d702964..eb7ff2b23 100644
--- a/docker/docker-compose-keycloak.yml
+++ b/docker/docker-compose-keycloak.yml
@@ -1,68 +1,15 @@
version: "3.9"
services:
- redis:
- image: "redis:${REDIS_TAG}"
- restart: unless-stopped
- environment:
- TZ: "${TZ}"
- volumes:
- - "redis_conf:/usr/local/etc/redis"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- database:
- image: "postgres:${POSTGRES_TAG}"
- restart: unless-stopped
- environment:
- POSTGRES_DB: "taranis-ng"
- POSTGRES_USER: "taranis-ng"
- POSTGRES_PASSWORD: "${POSTGRES_PASSWORD}"
- TZ: "${TZ}"
- PGTZ: "${TZ}"
- command: ["postgres", "-c", "shared_buffers=${DB_SHARED_BUFFERS}", "-c", "max_connections=${DB_MAX_CONNECTIONS}"]
- volumes:
- - "database_data:/var/lib/postgresql/data"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
core:
- depends_on:
- - "redis"
- - "database"
- restart: unless-stopped
- image: "skcert/taranis-ng-core:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.core
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
environment:
- REDIS_URL: "redis://redis"
- DB_URL: "database"
- DB_DATABASE: "taranis-ng"
- DB_USER: "taranis-ng"
- DB_PASSWORD: "${POSTGRES_PASSWORD}"
- DB_POOL_SIZE: 100
- DB_POOL_RECYCLE: 300
- DB_POOL_TIMEOUT: 30
+ TARANIS_NG_AUTHENTICATOR: "keycloak"
- JWT_SECRET_KEY: "${JWT_SECRET_KEY}"
OPENID_LOGOUT_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth/realms/taranis-ng/protocol/openid-connect/logout?redirect_uri=GOTO_URL"
TARANIS_NG_KEYCLOAK_INTERNAL_URL: "http://keycloak:8080"
TARANIS_NG_KEYCLOAK_REALM: "taranis-ng"
TARANIS_NG_KEYCLOAK_CLIENT_ID: "taranis-ng"
TARANIS_NG_KEYCLOAK_CLIENT_SECRET: "supersecret"
- TARANIS_NG_AUTHENTICATOR: "keycloak"
KEYCLOAK_USER_MANAGEMENT: "true"
KEYCLOAK_SERVER_URL: "http://keycloak:8080"
KEYCLOAK_ADMIN_USERNAME: "admin"
@@ -70,220 +17,8 @@ services:
KEYCLOAK_REALM_NAME: "taranis-ng"
KEYCLOAK_CLIENT_SECRET_KEY: "supersecret"
KEYCLOAK_VERIFY: "true"
- WORKERS_PER_CORE: "1"
-
- CVE_UPDATE_FILE: "${CVE_UPDATE_FILE}"
- CPE_UPDATE_FILE: "${CPE_UPDATE_FILE}"
-
- TZ: "${TZ}"
- DEBUG: "true"
- DEBUG_SQL: "false"
- # to allow automatic initialisation of collectors/presenters/publishers
- COLLECTOR_PRESENTER_PUBLISHER_API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}"
- labels:
- traefik.enable: "true"
- traefik.http.services.taranis-api.loadbalancer.server.port: "80"
-
- traefik.http.routers.taranis-api-443.entrypoints: "websecure"
- traefik.http.routers.taranis-api-443.rule: "PathPrefix(`/api/`)"
- traefik.http.routers.taranis-api-443.tls: "true"
- traefik.http.routers.taranis-api-443.tls.domains[0].main: "${TARANIS_NG_HOSTNAME}"
- traefik.http.routers.taranis-api-443.service: "taranis-api"
-
- traefik.http.routers.taranis-sse-443.entrypoints: "websecure"
- traefik.http.routers.taranis-sse-443.rule: "PathPrefix(`/sse`)"
- traefik.http.routers.taranis-sse-443.tls: "true"
- traefik.http.routers.taranis-sse-443.tls.domains[0].main: "${TARANIS_NG_HOSTNAME}"
- traefik.http.routers.taranis-sse-443.service: "taranis-api"
-
- volumes:
- - "core_data:/data"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- bots:
- depends_on:
- core:
- condition: service_healthy
- image: "skcert/taranis-ng-bots:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.bots
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
- environment:
- API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}"
- TARANIS_NG_CORE_URL: "http://core"
- TARANIS_NG_CORE_SSE: "http://core/sse"
- WORKERS_PER_CORE: "1"
- TZ: "${TZ}"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- collectors:
- depends_on:
- core:
- condition: service_healthy
- restart: unless-stopped
- image: "skcert/taranis-ng-collectors:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.collectors
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
- environment:
- TARANIS_NG_CORE_URL: "http://core"
- API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}"
- WORKERS_PER_CORE: "1"
- DEBUG: "true"
- TZ: "${TZ}"
- volumes:
- - "collector_storage:/app/storage"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- presenters:
- depends_on:
- core:
- condition: service_healthy
- restart: unless-stopped
- image: "skcert/taranis-ng-presenters:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.presenters
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
- environment:
- TARANIS_NG_CORE_URL: "http://core"
- API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}"
- WORKERS_PER_CORE: "1"
- TZ: "${TZ}"
- ports:
- - "${PRESENTER_PORT}:80"
- volumes:
- - "presenters_templates:/app/templates"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- publishers:
- depends_on:
- core:
- condition: service_healthy
- restart: unless-stopped
- image: "skcert/taranis-ng-publishers:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.publishers
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
- environment:
- TARANIS_NG_CORE_URL: "http://core"
- API_KEY: "${COLLECTOR_PRESENTER_PUBLISHER_API_KEY}"
- WORKERS_PER_CORE: "1"
- TZ: "${TZ}"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
gui:
- depends_on:
- - "core"
- restart: unless-stopped
- image: "skcert/taranis-ng-gui:${TARANIS_NG_TAG}"
- build:
- context: ..
- dockerfile: ./docker/Dockerfile.gui
- args:
- HTTP_PROXY: "${HTTP_PROXY}"
- HTTPS_PROXY: "${HTTPS_PROXY}"
- http_proxy: "${HTTP_PROXY}"
- https_proxy: "${HTTPS_PROXY}"
-# ports:
-# - "8080:80"
environment:
- NGINX_WORKERS: "4"
- NGINX_CONNECTIONS: "16"
- VUE_APP_TARANIS_NG_URL: "${TARANIS_NG_HTTPS_URI}"
- VUE_APP_TARANIS_NG_CORE_API: "${TARANIS_NG_HTTPS_URI}/api/v1"
- VUE_APP_TARANIS_NG_CORE_SSE: "${TARANIS_NG_HTTPS_URI}/sse"
- VUE_APP_TARANIS_NG_LOCALE: en
VUE_APP_TARANIS_NG_LOGOUT_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/auth/logout?gotoUrl=TARANIS_GUI_URI"
VUE_APP_TARANIS_NG_LOGIN_URL: "${TARANIS_NG_HTTPS_URI}/api/v1/keycloak/auth/realms/taranis-ng/protocol/openid-connect/auth?response_type=code&client_id=taranis-ng&redirect_uri=TARANIS_GUI_URI"
- TZ: "${TZ}"
- labels:
- traefik.enable: "true"
- traefik.http.services.taranis-gui.loadbalancer.server.port: "80"
-
- traefik.http.middlewares.redirect-to-443.redirectscheme.scheme: "https"
- traefik.http.middlewares.redirect-to-443.redirectscheme.port: "${TARANIS_NG_HTTPS_PORT}"
-
- traefik.http.routers.taranis-gui-80.entrypoints: "web"
- traefik.http.routers.taranis-gui-80.rule: "PathPrefix(`/`)"
- traefik.http.routers.taranis-gui-80.middlewares: "redirect-to-443"
-
- traefik.http.routers.taranis-gui-443.entrypoints: "websecure"
- traefik.http.routers.taranis-gui-443.rule: "PathPrefix(`/`)"
- traefik.http.routers.taranis-gui-443.tls: "true"
- traefik.http.routers.taranis-gui-443.tls.domains[0].main: "${TARANIS_NG_HOSTNAME}"
- traefik.http.routers.taranis-gui-443.service: "taranis-gui"
-
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
- traefik:
- depends_on:
- - "gui"
- - "core"
- restart: unless-stopped
- image: "traefik:latest"
- environment:
- TZ: "${TZ}"
- ports:
- - "${TARANIS_NG_HTTP_PORT}:80"
- - "${TARANIS_NG_HTTPS_PORT}:443"
- - "${TRAEFIK_MANAGEMENT_PORT}:9090"
- volumes:
- - "/var/run/docker.sock:/var/run/docker.sock:ro"
- - "./traefik:/etc/traefik:ro"
- - "./tls:/opt/certs"
- logging:
- driver: "json-file"
- options:
- max-size: "200k"
- max-file: "10"
-
-volumes:
- redis_conf:
- database_data:
- core_data:
- presenters_templates:
- collector_storage:
diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml
index 16d10b53f..2f4be5755 100644
--- a/docker/docker-compose.yml
+++ b/docker/docker-compose.yml
@@ -97,6 +97,7 @@ services:
depends_on:
core:
condition: service_healthy
+ restart: unless-stopped
image: "skcert/taranis-ng-bots:${TARANIS_NG_TAG}"
build:
context: ..
@@ -168,7 +169,7 @@ services:
ports:
- "${PRESENTER_PORT}:80"
volumes:
- - "presenters_templates:/app/templates"
+ - "presenters_templates:/app/templates/user_templates"
logging:
driver: "json-file"
options:
diff --git a/src/bots/managers/auth_manager.py b/src/bots/managers/auth_manager.py
index c7d94a288..24b59d938 100644
--- a/src/bots/managers/auth_manager.py
+++ b/src/bots/managers/auth_manager.py
@@ -1,11 +1,16 @@
+"""Authorization manager for the API.
+
+Returns:
+ wrapper: Wrapper function for the API endpoints.
+"""
from functools import wraps
from flask import request
import os
import ssl
-api_key = os.getenv('API_KEY')
+api_key = os.getenv("API_KEY")
-if os.getenv('SSL_VERIFICATION') == "False":
+if os.getenv("SSL_VERIFICATION") == "False":
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
@@ -15,11 +20,18 @@
def api_key_required(fn):
+ """Check for API key in the request header.
+
+ Arguments:
+ fn -- The function to be decorated.
+ Returns:
+ wrapper: Wrapper function for the API endpoints.
+ """
+
@wraps(fn)
def wrapper(*args, **kwargs):
-
- if not request.headers.has_key('Authorization') or request.headers['Authorization'] != ('Bearer ' + api_key):
- return {'error': 'not authorized'}, 401
+ if "Authorization" not in request.headers.keys() or request.headers["Authorization"] != ("Bearer " + api_key):
+ return {"error": "not authorized"}, 401
else:
return fn(*args, **kwargs)
diff --git a/src/bots/managers/log_manager.py b/src/bots/managers/log_manager.py
index 5f3dffb0c..3ee01c657 100644
--- a/src/bots/managers/log_manager.py
+++ b/src/bots/managers/log_manager.py
@@ -83,6 +83,9 @@ def log_critical(message):
sys_logger = None
log_debug("Unable to connect to syslog server!")
log_debug(ex)
+else:
+ # disable log duplicities on the screen if we have SYSLOG disabled
+ sys_logger = None
def log_system_activity(module, message):
log_info("[{}] {}".format(module, message))
diff --git a/src/bots/requirements.txt b/src/bots/requirements.txt
index 4139b7a7e..bc21d0327 100644
--- a/src/bots/requirements.txt
+++ b/src/bots/requirements.txt
@@ -1,26 +1,13 @@
-certifi==2019.11.28
-Flask==1.1.4
-Flask-Cors==3.0.10
-Flask-RESTful==0.3.7
-gevent==21.8.0
-greenlet==1.1.1
-gunicorn==20.0.4
-idna==2.8
-marshmallow==3.18.0
+Flask==3.0.0
+Flask-Cors==4.0.0
+Flask-RESTful==0.3.10
+gevent==23.9.1
+gunicorn==21.2.0
+marshmallow==3.20.1
marshmallow-enum==1.5.1
-Jinja2==2.11.3
-MarkupSafe==1.1.0
-oauthlib==3.1.0
+oauthlib==3.2.2
PySocks==1.7.1
-python-dateutil==2.8.1
-python-dotenv==0.10.5
-pytz==2019.3
-requests==2.26.0
-requests-oauthlib==1.3.0
-schedule==0.6.0
-six==1.14.0
-sseclient-py==1.7
-tweepy==3.8.0
-urllib3==1.26.7
-Werkzeug==0.16.0
-zipp==3.1.0
+python-dotenv==1.0.0
+requests==2.31.0
+schedule==1.2.1
+sseclient-py==1.8.0
diff --git a/src/collectors/collectors/atom_collector.py b/src/collectors/collectors/atom_collector.py
index 8edcf2d03..73713c7dc 100644
--- a/src/collectors/collectors/atom_collector.py
+++ b/src/collectors/collectors/atom_collector.py
@@ -1,12 +1,14 @@
import datetime
import hashlib
import uuid
+import traceback
import feedparser
import requests
from bs4 import BeautifulSoup
from dateutil.parser import parse
from .base_collector import BaseCollector
+from managers import log_manager
from shared.schema.news_item import NewsItemData
from shared.schema.parameter import Parameter, ParameterType
@@ -30,6 +32,7 @@ def collect(self, source):
feed_url = source.parameter_values['ATOM_FEED_URL']
user_agent = source.parameter_values['USER_AGENT']
interval = source.parameter_values['REFRESH_INTERVAL']
+ log_manager.log_collector_activity("atom", source.name, "Starting collector for url: {}".format(feed_url))
proxies = {}
if 'PROXY_SERVER' in source.parameter_values:
@@ -50,16 +53,14 @@ def collect(self, source):
news_items = []
+ limit = BaseCollector.history(interval)
for feed_entry in feed['entries']:
-
- limit = BaseCollector.history(interval)
published = feed_entry['updated']
published = parse(published, tzinfos=BaseCollector.timezone_info())
-
if str(published) > str(limit):
link_for_article = feed_entry['link']
-
+ log_manager.log_collector_activity("atom", source.name, "Processing entry [{}]".format(link_for_article))
if proxies:
page = requests.get(link_for_article, headers={'User-Agent': user_agent}, proxies=proxies)
else:
@@ -85,4 +86,8 @@ def collect(self, source):
BaseCollector.publish(news_items, source)
except Exception as error:
+ log_manager.log_collector_activity("atom", source.name, "ATOM collection exceptionally failed")
BaseCollector.print_exception(source, error)
+ log_manager.log_debug(traceback.format_exc())
+
+ log_manager.log_debug("{} collection finished.".format(self.type))
diff --git a/src/collectors/collectors/rss_collector.py b/src/collectors/collectors/rss_collector.py
index dd8bf3517..5c81f5440 100644
--- a/src/collectors/collectors/rss_collector.py
+++ b/src/collectors/collectors/rss_collector.py
@@ -1,3 +1,4 @@
+"""RSS collector module."""
import datetime
import hashlib
import uuid
@@ -17,13 +18,19 @@
class RSSCollector(BaseCollector):
+ """RSS collector class.
+
+ Arguments:
+ BaseCollector -- Base collector class.
+ """
+
type = "RSS_COLLECTOR"
name = "RSS Collector"
description = "Collector for gathering data from RSS feeds"
parameters = [
Parameter(0, "FEED_URL", "Feed URL", "Full url for RSS feed", ParameterType.STRING),
- Parameter(0, "USER_AGENT", "User agent", "Type of user agent", ParameterType.STRING)
+ Parameter(0, "USER_AGENT", "User agent", "Type of user agent", ParameterType.STRING),
]
parameters.extend(BaseCollector.parameters)
@@ -32,45 +39,52 @@ class RSSCollector(BaseCollector):
@BaseCollector.ignore_exceptions
def collect(self, source):
+ """Collect data from RSS feed.
- feed_url = source.parameter_values['FEED_URL']
- interval = source.parameter_values['REFRESH_INTERVAL']
+ Arguments:
+ source -- Source object.
+ """
+ feed_url = source.parameter_values["FEED_URL"]
+ # interval = source.parameter_values["REFRESH_INTERVAL"]
- log_manager.log_collector_activity('rss', source.name, 'Starting collector for url: {}'.format(feed_url))
+ log_manager.log_collector_activity("rss", source.name, "Starting collector for url: {}".format(feed_url))
- user_agent = source.parameter_values['USER_AGENT']
+ user_agent = source.parameter_values["USER_AGENT"]
if user_agent:
feedparser.USER_AGENT = user_agent
- user_agent_headers = {'User-Agent': user_agent}
+ # user_agent_headers = {"User-Agent": user_agent}
else:
- user_agent_headers = { }
+ # user_agent_headers = {}
+ pass
# use system proxy
proxy_handler = None
opener = urllib.request.urlopen
- if 'PROXY_SERVER' in source.parameter_values:
- proxy_server = source.parameter_values['PROXY_SERVER']
+ if "PROXY_SERVER" in source.parameter_values:
+ proxy_server = source.parameter_values["PROXY_SERVER"]
# disable proxy - do not use system proxy
- if proxy_server == 'none':
+ if proxy_server == "none":
proxy_handler = urllib.request.ProxyHandler({})
else:
proxy = re.search(r"^(http|https|socks4|socks5)://([a-zA-Z0-9\-\.\_]+):(\d+)/?$", proxy_server)
if proxy:
scheme, host, port = proxy.groups()
# classic HTTP/HTTPS proxy
- if scheme in ['http', 'https']:
- proxy_handler = urllib.request.ProxyHandler({
- 'http': '{}://{}:{}'.format(scheme, host, port),
- 'https': '{}://{}:{}'.format(scheme, host, port),
- 'ftp': '{}://{}:{}'.format(scheme, host, port)
- })
+ if scheme in ["http", "https"]:
+ proxy_handler = urllib.request.ProxyHandler(
+ {
+ "http": "{}://{}:{}".format(scheme, host, port),
+ "https": "{}://{}:{}".format(scheme, host, port),
+ "ftp": "{}://{}:{}".format(scheme, host, port),
+ }
+ )
# socks4 proxy
- elif scheme == 'socks4':
+ elif scheme == "socks4":
proxy_handler = SocksiPyHandler(socks.SOCKS4, host, int(port))
# socks5 proxy
- elif scheme == 'socks5':
+ elif scheme == "socks5":
proxy_handler = SocksiPyHandler(socks.SOCKS5, host, int(port))
# use proxy in urllib
@@ -79,63 +93,72 @@ def collect(self, source):
try:
if proxy_handler:
- feed = feedparser.parse(feed_url, handlers = [proxy_handler])
+ feed = feedparser.parse(feed_url, handlers=[proxy_handler])
else:
feed = feedparser.parse(feed_url)
- log_manager.log_collector_activity('rss', source.name, 'RSS returned feed with {} entries'.format(len(feed['entries'])))
+ log_manager.log_collector_activity("rss", source.name, "RSS returned feed with {} entries".format(len(feed["entries"])))
news_items = []
- for feed_entry in feed['entries']:
-
- for key in ['author', 'published', 'title', 'description', 'link']:
- if not feed_entry.has_key(key):
- feed_entry[key] = ''
+ for feed_entry in feed["entries"]:
+ for key in ["author", "published", "title", "description", "link"]:
+ if key not in feed_entry.keys():
+ feed_entry[key] = ""
- limit = BaseCollector.history(interval)
- published = feed_entry['published']
- published = dateparser.parse(published, settings={'DATE_ORDER': 'DMY'})
+ # limit = BaseCollector.history(interval)
+ published = feed_entry["published"]
+ published = dateparser.parse(published, settings={"DATE_ORDER": "DMY"})
# if published > limit: TODO: uncomment after testing, we need some initial data now
- link_for_article = feed_entry['link']
+ link_for_article = feed_entry["link"]
if not link_for_article:
log_manager.log_collector_activity("rss", source.name, "Skipping (empty link)")
continue
- log_manager.log_collector_activity('rss', source.name, 'Processing entry [{}]'.format(link_for_article))
+ log_manager.log_collector_activity("rss", source.name, "Processing entry [{}]".format(link_for_article))
- html_content = ''
+ html_content = ""
request = urllib.request.Request(link_for_article)
- request.add_header('User-Agent', user_agent)
+ request.add_header("User-Agent", user_agent)
with opener(request) as response:
html_content = response.read()
- soup = BeautifulSoup(html_content, features='html.parser')
+ soup = BeautifulSoup(html_content, features="html.parser")
- content = ''
+ content = ""
if html_content:
- content_text = [p.text.strip() for p in soup.findAll('p')]
- replaced_str = '\xa0'
+ content_text = [p.text.strip() for p in soup.findAll("p")]
+ replaced_str = "\xa0"
if replaced_str:
- content = [w.replace(replaced_str, ' ') for w in content_text]
- content = ' '.join(content)
-
- for_hash = feed_entry['author'] + feed_entry['title'] + feed_entry['link']
-
- news_item = NewsItemData(uuid.uuid4(), hashlib.sha256(for_hash.encode()).hexdigest(),
- feed_entry['title'], feed_entry['description'], feed_url, feed_entry['link'],
- feed_entry['published'], feed_entry['author'], datetime.datetime.now(),
- content, source.id, [])
+ content = [w.replace(replaced_str, " ") for w in content_text]
+ content = " ".join(content)
+
+ for_hash = feed_entry["author"] + feed_entry["title"] + feed_entry["link"]
+
+ news_item = NewsItemData(
+ uuid.uuid4(),
+ hashlib.sha256(for_hash.encode()).hexdigest(),
+ feed_entry["title"],
+ feed_entry["description"],
+ feed_url,
+ feed_entry["link"],
+ feed_entry["published"],
+ feed_entry["author"],
+ datetime.datetime.now(),
+ content,
+ source.id,
+ [],
+ )
news_items.append(news_item)
BaseCollector.publish(news_items, source)
except Exception as error:
- log_manager.log_collector_activity('rss', source.name, 'RSS collection exceptionally failed')
+ log_manager.log_collector_activity("rss", source.name, "RSS collection exceptionally failed")
BaseCollector.print_exception(source, error)
log_manager.log_debug(traceback.format_exc())
diff --git a/src/collectors/collectors/web_collector.py b/src/collectors/collectors/web_collector.py
index 8151e7744..2f14d401c 100644
--- a/src/collectors/collectors/web_collector.py
+++ b/src/collectors/collectors/web_collector.py
@@ -12,7 +12,9 @@
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from selenium.webdriver.chrome.options import Options as ChromeOptions
+from selenium.webdriver.chrome.service import Service as ChromeService
from selenium.webdriver.firefox.options import Options as FirefoxOptions
+from selenium.webdriver.firefox.service import Service as FirefoxService
from urllib.parse import urlparse
import os
import dateparser
@@ -137,17 +139,17 @@ def __find_element_by(driver, element_selector):
element = None
if prefix == 'id':
- element = driver.find_element_by_id(selector)
+ element = driver.find_element(By.ID, selector)
if prefix == 'name':
- element = driver.find_element_by_name(selector)
+ element = driver.find_element(By.NAME, selector)
elif prefix == 'xpath':
- element = driver.find_element_by_xpath(selector)
+ element = driver.find_element(By.XPATH, selector)
elif prefix in [ 'tag_name', 'tag' ]:
- element = driver.find_element_by_tag_name(selector)
+ element = driver.find_element(By.TAG_NAME, selector)
elif prefix in [ 'class_name', 'class' ]:
- element = driver.find_element_by_class_name(selector)
+ element = driver.find_element(By.CLASS_NAME, selector)
elif prefix in [ 'css_selector', 'css' ]:
- element = driver.find_element_by_css_selector(selector)
+ element = driver.find_element(By.CSS_SELECTOR, selector)
return element
@@ -177,17 +179,17 @@ def __find_elements_by(driver, element_selector):
elements = None
if prefix == 'id':
- elements = [ driver.find_element_by_id(selector) ]
+ elements = [ driver.find_element(By.ID, selector) ]
if prefix == 'name':
- elements = driver.find_elements_by_name(selector)
+            elements = driver.find_elements(By.NAME, selector)
elif prefix == 'xpath':
- elements = driver.find_elements_by_xpath(selector)
+            elements = driver.find_elements(By.XPATH, selector)
elif prefix in [ 'tag_name', 'tag' ]:
- elements = driver.find_elements_by_tag_name(selector)
+ elements = driver.find_elements(By.TAG_NAME, selector)
elif prefix in [ 'class_name', 'class' ]:
- elements = driver.find_elements_by_class_name(selector)
+            elements = driver.find_elements(By.CLASS_NAME, selector)
elif prefix in [ 'css_selector', 'css' ]:
- elements = driver.find_elements_by_css_selector(selector)
+ elements = driver.find_elements(By.CSS_SELECTOR, selector)
return elements
@staticmethod
@@ -368,12 +370,12 @@ def __get_headless_driver_chrome(self):
chrome_options.add_argument("--headless")
chrome_options.add_argument('--ignore-certificate-errors')
chrome_options.add_argument('--incognito')
+ chrome_service = ChromeService(executable_path=chrome_driver_executable)
if self.user_agent:
chrome_options.add_argument('user-agent=' + self.user_agent)
if self.tor_service.lower() == 'yes':
socks_proxy = "socks5://127.0.0.1:9050"
chrome_options.add_argument('--proxy-server={}'.format(socks_proxy))
- driver = webdriver.Chrome(executable_path=chrome_driver_executable, options=chrome_options)
elif self.proxy:
webdriver.DesiredCapabilities.CHROME['proxy'] = {
"proxyType": "MANUAL",
@@ -381,10 +383,8 @@ def __get_headless_driver_chrome(self):
"ftpProxy": self.proxy,
"sslProxy": self.proxy
}
- driver = webdriver.Chrome(executable_path=chrome_driver_executable, options=chrome_options)
- else:
- driver = webdriver.Chrome(executable_path=chrome_driver_executable, options=chrome_options)
-
+
+ driver = webdriver.Chrome(service=chrome_service, options=chrome_options)
log_manager.log_debug('Chrome driver initialized.')
return driver
@@ -406,30 +406,26 @@ def __get_headless_driver_firefox(self):
if self.user_agent:
firefox_options.add_argument('user-agent=' + self.user_agent)
- profile = webdriver.FirefoxProfile()
- firefox_capabilities = webdriver.DesiredCapabilities.FIREFOX
- firefox_capabilities['marionette'] = True
-
if self.tor_service.lower() == 'yes':
- profile.set_preference('network.proxy.type', 1) # manual proxy config
- profile.set_preference('network.proxy.socks', '127.0.0.1')
- profile.set_preference('network.proxy.socks_port', 9050)
- profile.set_preference('network.proxy.no_proxies_on', f'localhost, ::1, 127.0.0.1, {core_url_host}, 127.0.0.0/8');
+ firefox_options.set_preference('network.proxy.type', 1) # manual proxy config
+ firefox_options.set_preference('network.proxy.socks', '127.0.0.1')
+ firefox_options.set_preference('network.proxy.socks_port', 9050)
+ firefox_options.set_preference('network.proxy.no_proxies_on', f'localhost, ::1, 127.0.0.1, {core_url_host}, 127.0.0.0/8');
elif self.proxy:
- profile.set_preference('network.proxy.type', 1) # manual proxy config
- profile.set_preference('network.proxy.http', self.proxy_host)
- profile.set_preference('network.proxy.http_port', int(self.proxy_port))
- profile.set_preference('network.proxy.ssl', self.proxy_host)
- profile.set_preference('network.proxy.ssl_port', int(self.proxy_port))
- profile.set_preference('network.proxy.ftp', self.proxy)
- profile.set_preference('network.proxy.ftp_port', int(self.proxy_port))
- profile.set_preference('network.proxy.no_proxies_on', f'localhost, ::1, 127.0.0.1, {core_url_host}, 127.0.0.0/8');
+ firefox_options.set_preference('network.proxy.type', 1) # manual proxy config
+ firefox_options.set_preference('network.proxy.http', self.proxy_host)
+ firefox_options.set_preference('network.proxy.http_port', int(self.proxy_port))
+ firefox_options.set_preference('network.proxy.ssl', self.proxy_host)
+ firefox_options.set_preference('network.proxy.ssl_port', int(self.proxy_port))
+ firefox_options.set_preference('network.proxy.ftp', self.proxy)
+ firefox_options.set_preference('network.proxy.ftp_port', int(self.proxy_port))
+ firefox_options.set_preference('network.proxy.no_proxies_on', f'localhost, ::1, 127.0.0.1, {core_url_host}, 127.0.0.0/8');
else:
- profile.set_preference('network.proxy.type', 0) # no proxy
+ firefox_options.set_preference('network.proxy.type', 0) # no proxy
- profile.update_preferences()
- driver = webdriver.Firefox(profile, executable_path=firefox_driver_executable, options=firefox_options, capabilities=firefox_capabilities)
+ firefox_service = FirefoxService(executable_path=firefox_driver_executable)
+ driver = webdriver.Firefox(service=firefox_service, options=firefox_options)
log_manager.log_debug('Firefox driver initialized.')
return driver
@@ -518,8 +514,11 @@ def __browse_title_page(self, index_url):
popup = WebDriverWait(browser, 10).until(EC.presence_of_element_located(self.__get_element_locator(self.selectors['popup_close'])))
except Exception as ex:
log_manager.log_collector_activity('web', self.source.name, 'Popup find error: ' + traceback.format_exc())
- if popup:
- popup.click()
+ try:
+ if popup:
+ popup.click()
+ except Exception as ex:
+ log_manager.log_collector_activity('web', self.source.name, 'Popup click error: ' + traceback.format_exc())
# if there is a "load more" selector, click on it!
page = 1
diff --git a/src/collectors/managers/auth_manager.py b/src/collectors/managers/auth_manager.py
index c7d94a288..28d4ce145 100644
--- a/src/collectors/managers/auth_manager.py
+++ b/src/collectors/managers/auth_manager.py
@@ -1,11 +1,16 @@
+"""Authorization manager for the API.
+
+Returns:
+ wrapper: Wrapper function for the API endpoints.
+"""
from functools import wraps
from flask import request
import os
import ssl
-api_key = os.getenv('API_KEY')
+api_key = os.getenv("API_KEY")
-if os.getenv('SSL_VERIFICATION') == "False":
+if os.getenv("SSL_VERIFICATION") == "False":
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
@@ -15,11 +20,19 @@
def api_key_required(fn):
+ """Check for API key in the request header.
+
+ Arguments:
+ fn -- The function to be decorated.
+
+ Returns:
+ wrapper: Wrapper function for the API endpoints.
+ """
+
@wraps(fn)
def wrapper(*args, **kwargs):
-
- if not request.headers.has_key('Authorization') or request.headers['Authorization'] != ('Bearer ' + api_key):
- return {'error': 'not authorized'}, 401
+ if "Authorization" not in request.headers.keys() or request.headers["Authorization"] != ("Bearer " + api_key):
+ return {"error": "not authorized"}, 401
else:
return fn(*args, **kwargs)
diff --git a/src/collectors/managers/log_manager.py b/src/collectors/managers/log_manager.py
index 07a2f6db0..176860ba7 100644
--- a/src/collectors/managers/log_manager.py
+++ b/src/collectors/managers/log_manager.py
@@ -83,6 +83,9 @@ def log_critical(message):
sys_logger = None
log_warning("Unable to connect to syslog server!")
log_warning(ex)
+else:
+    # SYSLOG is disabled: clear the handle to avoid duplicate log output on the screen
+ sys_logger = None
def log_system_activity(module, message):
log_info("[{}] {}".format(module, message))
diff --git a/src/collectors/requirements.txt b/src/collectors/requirements.txt
index 4352be63b..d7b1d1d38 100644
--- a/src/collectors/requirements.txt
+++ b/src/collectors/requirements.txt
@@ -1,30 +1,19 @@
-beautifulsoup4==4.8.1
-bleach==4.1.0
-certifi==2021.10.8
-feedparser==5.2.1
-Flask==1.1.4
-Flask-Cors==3.0.10
-Flask-RESTful==0.3.7
-gevent==21.8.0
-greenlet==1.1.1
-gunicorn==20.0.4
-lxml==4.6.5
-marshmallow==3.18.0
+beautifulsoup4==4.12.2
+bleach==6.1.0
+dateparser==1.2.0
+feedparser==6.0.10
+Flask==3.0.0
+Flask-Cors==4.0.0
+Flask-RESTful==0.3.10
+gevent==23.9.1
+gunicorn==21.2.0
+marshmallow==3.20.1
marshmallow-enum==1.5.1
-Jinja2==2.11.3
-MarkupSafe==1.1.0
-pyslack==0.5.0
PySocks==1.7.1
-python-dateutil==2.8.1
-python-dotenv==0.10.5
-pytz==2019.3
-requests==2.26.0
-schedule==0.6.0
-selenium==4.0.0
-six==1.14.0
-slackclient==1.0.7
-soupsieve==1.9.5
-tweepy==3.8.0
-Werkzeug==0.16.0
-zipp==3.1.0
-dateparser==1.1.1
+python-dateutil==2.8.2
+python-dotenv==1.0.0
+requests==2.31.0
+schedule==1.2.1
+selenium==4.15.2
+slackclient==1.3.2
+tweepy==4.14.0
diff --git a/src/core/README.md b/src/core/README.md
index c7ee28cc6..457b230ff 100644
--- a/src/core/README.md
+++ b/src/core/README.md
@@ -55,7 +55,9 @@ KEYCLOAK_REALM_NAME: "taranis-ng"
KEYCLOAK_USER_MANAGEMENT: "false"
```
-You can use and modify the existing `docker-compose-keycloak.yml` example in the repository.
+You can use and modify the existing `docker-compose-keycloak.yml` example in the repository and
+run it with `docker-compose -f docker-compose.yml -f docker-compose-keycloak.yml`.
+
# **LDAP authentication**
If you prefer to authenticate users with LDAP, you need to set environment variables similarly to this:
diff --git a/src/core/managers/log_manager.py b/src/core/managers/log_manager.py
index 16b15acc2..c265b19ae 100644
--- a/src/core/managers/log_manager.py
+++ b/src/core/managers/log_manager.py
@@ -31,8 +31,6 @@
gunicorn_logger.setLevel(logging.DEBUG)
sys_logger.setLevel(logging.DEBUG)
-
-
# alter the sensitive value for logging, based on LOG_SENSITIVE_DATA env variable:
#
# LOG_SENSITIVE_DATA=no (or undefined) - remove sensitive data
@@ -59,7 +57,6 @@ def sensitive_value(value):
else:
return '•••••'
-
# used to decrypt the encrypted secrets from the logs
# source: https://github.com/gdavid7/cryptocode/blob/main/cryptocode.py
# TODO: add a command line wrapper around this function
@@ -106,8 +103,6 @@ def generate_escaped_data(request_data):
data = re.sub(r'(^\s+)|(\s+$)', '', data)
return data
-
-
# send a debug message
def log_debug(message):
formatted_message = "[{}] {}".format(module_id,message)
@@ -168,18 +163,18 @@ def log_critical(message):
sys_logger = None
log_debug("Unable to connect to syslog server!")
log_debug(ex)
-
+else:
+    # SYSLOG is disabled: clear the handle to avoid duplicate log output on the screen
+ sys_logger = None
def resolve_ip_address():
headers_list = request.headers.getlist("X-Forwarded-For")
ip_address = headers_list[0] if headers_list else request.remote_addr
return ip_address
-
def resolve_method():
return request.method
-
def resolve_resource():
fp_len = len(request.full_path)
if request.full_path[fp_len - 1] == '?':
@@ -187,17 +182,14 @@ def resolve_resource():
else:
return request.full_path
-
def store_activity(activity_type, activity_detail, request_data = None):
LogRecord.store(resolve_ip_address(), None, None, None, None, module_id, activity_type, resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_user_activity(user, activity_type, activity_detail, request_data = None):
LogRecord.store(resolve_ip_address(), user.id, user.name, None, None, module_id, activity_type, resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_access_error_activity(user, activity_detail, request_data = None):
ip = resolve_ip_address()
log_text = "TARANIS NG Access Error (IP: {}, User ID: {}, User Name: {}, Method: {}, Resource: {}, Activity Detail: {}, Activity Data: {})".format(
@@ -220,7 +212,6 @@ def store_access_error_activity(user, activity_detail, request_data = None):
LogRecord.store(ip, user.id, user.name, None, None, module_id, "ACCESS_ERROR", resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_data_error_activity(user, activity_detail, request_data = None):
db.session.rollback()
ip = resolve_ip_address()
@@ -243,7 +234,6 @@ def store_data_error_activity(user, activity_detail, request_data = None):
LogRecord.store(ip, user.id, user.name, None, None, module_id, "DATA_ERROR", resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_data_error_activity_no_user(activity_detail, request_data = None):
db.session.rollback()
ip = resolve_ip_address()
@@ -264,7 +254,6 @@ def store_data_error_activity_no_user(activity_detail, request_data = None):
LogRecord.store(ip, None, None, None, None, module_id, "PUBLIC_ACCESS_DATA_ERROR", resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_auth_error_activity(activity_detail, request_data = None):
db.session.rollback()
log_text = "TARANIS NG Auth Error (Method: {}, Resource: {}, Activity Detail: {}, Activity Data: {})".format(
@@ -283,7 +272,6 @@ def store_auth_error_activity(activity_detail, request_data = None):
LogRecord.store(resolve_ip_address(), None, None, None, None, module_id, "AUTH_ERROR", resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_user_auth_error_activity(user, activity_detail, request_data = None):
db.session.rollback()
ip = resolve_ip_address()
@@ -304,12 +292,10 @@ def store_user_auth_error_activity(user, activity_detail, request_data = None):
LogRecord.store(ip, user.id, user.name, None, None, module_id, "AUTH_ERROR", resolve_resource(),
activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_system_activity(system_id, system_name, activity_type, activity_detail, request_data = None):
LogRecord.store(resolve_ip_address(), None, None, system_id, system_name, module_id, activity_type,
resolve_resource(), activity_detail, resolve_method(), generate_escaped_data(request_data))
-
def store_system_error_activity(system_id, system_name, activity_type, activity_detail, request_data = None):
db.session.rollback()
ip = resolve_ip_address()
diff --git a/src/core/migrations/versions/d776f47ce040_update_pdf_template_path.py b/src/core/migrations/versions/d776f47ce040_update_pdf_template_path.py
new file mode 100644
index 000000000..7357d6a3f
--- /dev/null
+++ b/src/core/migrations/versions/d776f47ce040_update_pdf_template_path.py
@@ -0,0 +1,83 @@
+"""Correct old presenter template details
+
+Revision ID: d776f47ce040
+Revises: 1c4eed243364
+Create Date: 2023-11-24 12:58:32.377642
+
+"""
+from alembic import op
+from sqlalchemy import orm, Column, ForeignKey, String, Integer, Boolean, text
+from sqlalchemy.ext.declarative import declarative_base
+import sqlalchemy as sa
+
+Base = declarative_base()
+
+# revision identifiers, used by Alembic.
+revision = 'd776f47ce040'
+down_revision = '1c4eed243364'
+branch_labels = None
+depends_on = None
+
+class Presenter_d776f47ce040(Base):
+ __tablename__ = 'presenter'
+ id = Column(String(64), primary_key=True)
+ type = Column(String, nullable=False)
+
+class PresenterParameter_d776f47ce040(Base):
+ __tablename__ = 'presenter_parameter'
+ presenter_id = Column(String(64), ForeignKey('presenter.id'), primary_key=True, nullable=False)
+ parameter_id = Column(Integer, ForeignKey('parameter.id'), primary_key=True, nullable=False)
+
+class Parameter_d776f47ce040(Base):
+ __tablename__ = 'parameter'
+ id = Column(Integer, primary_key=True, server_default=text("nextval('parameter_id_seq'::regclass)"))
+ key = Column(String, nullable=False)
+ name = Column(String, nullable=False)
+ description = Column(String)
+
+class ParameterValue_d776f47ce040(Base):
+ __tablename__ = 'parameter_value'
+ id = Column(Integer, primary_key=True, server_default=text("nextval('parameter_value_id_seq'::regclass)"))
+ value = Column(String, nullable=False)
+ parameter_id = Column(ForeignKey('parameter.id'))
+
+def upgrade():
+ bind = op.get_bind()
+ session = orm.Session(bind=bind)
+
+ # add cascade delete
+ delete_previous()
+ # parameter -> presenter_parameter
+ op.create_foreign_key('presenter_parameter_parameter_id_fkey', 'presenter_parameter', 'parameter', ['parameter_id'], ['id'], ondelete='CASCADE')
+
+ # Correct old presenter template details
+ presenters = session.query(Presenter_d776f47ce040).filter_by(type = 'PDF_PRESENTER').all()
+ for pres in presenters:
+ presenterParameters = session.query(PresenterParameter_d776f47ce040).filter_by(presenter_id = pres.id).all()
+ for presParam in presenterParameters:
+ parameters = session.query(Parameter_d776f47ce040).filter_by(id = presParam.parameter_id).all()
+ for param in parameters:
+ if param.key == "HEADER_TEMPLATE_PATH" or param.key == "FOOTER_TEMPLATE_PATH":
+ session.delete(param)
+ print(f"Old parameter deleted... ({param.key})", flush=True)
+ elif param.key == "BODY_TEMPLATE_PATH":
+ param.key = "PDF_TEMPLATE_PATH"
+ param.name = "PDF template with its path"
+ param.description = "Path of pdf template file"
+ session.add(param)
+ val = session.query(ParameterValue_d776f47ce040).filter_by(parameter_id = param.id).first()
+ if val:
+ val.value = val.value.replace("pdf_body_template.html", "pdf_template.html")
+ session.add(val)
+ print(f"Old parameter updated... ({param.key})", flush=True)
+ session.commit()
+
+def downgrade():
+ delete_previous()
+ # parameter -> presenter_parameter
+ op.create_foreign_key('presenter_parameter_parameter_id_fkey', 'presenter_parameter', 'parameter', ['parameter_id'], ['id'])
+
+def delete_previous():
+ print("Deleting previous constraints...", flush=True)
+ op.drop_constraint('presenter_parameter_parameter_id_fkey', 'presenter_parameter', type_='foreignkey')
+ print("Adding new constraints...", flush=True)
\ No newline at end of file
diff --git a/src/core/model/report_item.py b/src/core/model/report_item.py
index a10de345a..b1bf3fd10 100644
--- a/src/core/model/report_item.py
+++ b/src/core/model/report_item.py
@@ -375,14 +375,15 @@ def update_report_item(cls, id, data, user):
if 'attribute_id' in data:
for attribute in report_item.attributes:
- if attribute.id == data['attribute_id']:
+ # the stored id and the request payload id may arrive as int or str, so compare as strings
+ if str(attribute.id) == str(data['attribute_id']):
if attribute.value != data['attribute_value']:
modified = True
attribute.value = data['attribute_value']
data['attribute_value'] = ''
attribute.user = user
attribute.last_updated = datetime.now()
- break
+ break
if 'add' in data:
if 'attribute_id' in data:
@@ -407,7 +408,8 @@ def update_report_item(cls, id, data, user):
if 'attribute_id' in data:
attribute_to_delete = None
for attribute in report_item.attributes:
- if attribute.id == data['attribute_id']:
+ # the stored id and the request payload id may arrive as int or str, so compare as strings
+ if str(attribute.id) == str(data['attribute_id']):
attribute_to_delete = attribute
break
@@ -466,7 +468,7 @@ def get_updated_data(cls, id, data):
if 'attribute_id' in data:
for attribute in report_item.attributes:
- if attribute.id == data['attribute_id']:
+ if str(attribute.id) == data['attribute_id']:
data['attribute_value'] = attribute.value
data['attribute_last_updated'] = attribute.last_updated.strftime('%d.%m.%Y - %H:%M')
data['attribute_user'] = attribute.user.name
@@ -489,7 +491,7 @@ def get_updated_data(cls, id, data):
if 'attribute_id' in data:
for attribute in report_item.attributes:
- if attribute.id == data['attribute_id']:
+ if str(attribute.id) == data['attribute_id']:
data['attribute_value'] = attribute.value
data['binary_mime_type'] = attribute.binary_mime_type
data['binary_size'] = attribute.binary_size
diff --git a/src/gui/src/components/analyze/NewReportItem.vue b/src/gui/src/components/analyze/NewReportItem.vue
index e8bf6a539..80782b3a9 100644
--- a/src/gui/src/components/analyze/NewReportItem.vue
+++ b/src/gui/src/components/analyze/NewReportItem.vue
@@ -226,14 +226,14 @@