Feat/deployment update (#56)

* feat: update analyzer thresholds and add crawl configuration options

* feat: update Helm chart version and add README for installation instructions

* feat: update installation instructions in README and add Docker support

* feat: update deployment manifests and configuration for improved service handling and analyzer settings

* feat: add API endpoint for paginated IP retrieval and enhance dashboard visualization with category filters

* feat: update configuration for Krawl service to use external config file

* feat: refactor code for improved readability and consistency across multiple files

* feat: remove Flake8, Pylint, and test steps from PR checks workflow

This commit is contained in:
Lorenzo Venerandi
2026-01-26 12:36:22 +01:00
committed by GitHub
parent 130e81ad64
commit 8c76f6c847
20 changed files with 1025 additions and 269 deletions

View File

@@ -511,7 +511,10 @@ class Handler(BaseHTTPRequestHandler):
return
# API endpoint for fetching all IP statistics
if self.config.dashboard_secret_path and self.path == f"{self.config.dashboard_secret_path}/api/all-ip-stats":
if (
self.config.dashboard_secret_path
and self.path == f"{self.config.dashboard_secret_path}/api/all-ip-stats"
):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.send_header("Access-Control-Allow-Origin", "*")
@@ -554,7 +557,7 @@ class Handler(BaseHTTPRequestHandler):
from urllib.parse import urlparse, parse_qs
db = get_database()
# Parse query parameters
parsed_url = urlparse(self.path)
query_params = parse_qs(parsed_url.query)
@@ -567,7 +570,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100) # Max 100 per page
result = db.get_attackers_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_attackers_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -576,6 +584,52 @@ class Handler(BaseHTTPRequestHandler):
self.wfile.write(json.dumps({"error": str(e)}).encode())
return
# API endpoint for fetching all IPs (all categories)
if self.config.dashboard_secret_path and self.path.startswith(
f"{self.config.dashboard_secret_path}/api/all-ips"
):
self.send_response(200)
self.send_header("Content-type", "application/json")
self.send_header("Access-Control-Allow-Origin", "*")
self.send_header(
"Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"
)
self.send_header("Pragma", "no-cache")
self.send_header("Expires", "0")
self.end_headers()
try:
from database import get_database
import json
from urllib.parse import urlparse, parse_qs
db = get_database()
# Parse query parameters
parsed_url = urlparse(self.path)
query_params = parse_qs(parsed_url.query)
page = int(query_params.get("page", ["1"])[0])
page_size = int(query_params.get("page_size", ["25"])[0])
sort_by = query_params.get("sort_by", ["total_requests"])[0]
sort_order = query_params.get("sort_order", ["desc"])[0]
# Ensure valid parameters
page = max(1, page)
page_size = min(max(1, page_size), 100) # Max 100 per page
result = db.get_all_ips_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
except Exception as e:
self.app_logger.error(f"Error fetching all IPs: {e}")
self.wfile.write(json.dumps({"error": str(e)}).encode())
return
# API endpoint for fetching IP stats
if self.config.dashboard_secret_path and self.path.startswith(
f"{self.config.dashboard_secret_path}/api/ip-stats/"
@@ -639,7 +693,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_honeypot_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_honeypot_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -677,7 +736,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_credentials_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_credentials_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -715,7 +779,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_top_ips_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_top_ips_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -753,7 +822,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_top_paths_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_top_paths_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -791,7 +865,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_top_user_agents_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_top_user_agents_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass
@@ -829,7 +908,12 @@ class Handler(BaseHTTPRequestHandler):
page = max(1, page)
page_size = min(max(1, page_size), 100)
result = db.get_attack_types_paginated(page=page, page_size=page_size, sort_by=sort_by, sort_order=sort_order)
result = db.get_attack_types_paginated(
page=page,
page_size=page_size,
sort_by=sort_by,
sort_order=sort_order,
)
self.wfile.write(json.dumps(result).encode())
except BrokenPipeError:
pass