2025-12-14 19:08:01 +01:00
|
|
|
#!/usr/bin/env python3
|
|
|
|
|
|
2026-02-17 18:09:35 +01:00
|
|
|
from typing import Dict, Tuple, Optional
|
2025-12-24 10:25:00 -06:00
|
|
|
import re
|
2025-12-27 19:17:27 +01:00
|
|
|
import urllib.parse
|
2026-01-24 23:28:10 +01:00
|
|
|
|
2026-01-03 17:14:58 +01:00
|
|
|
from wordlists import get_wordlists
|
2025-12-28 10:43:32 -06:00
|
|
|
from database import get_database, DatabaseManager
|
2025-12-14 19:08:01 +01:00
|
|
|
|
2026-02-17 15:17:54 +01:00
|
|
|
# Module-level singleton for background task access.
# Populated by set_tracker() during app startup and read back with
# get_tracker(); stays None until startup registers a tracker.
_tracker_instance: "AccessTracker | None" = None
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def get_tracker() -> "AccessTracker | None":
    """Return the process-wide AccessTracker singleton.

    Returns None when application startup has not yet registered a
    tracker via set_tracker().
    """
    return _tracker_instance
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def set_tracker(tracker: "AccessTracker"):
    """Register *tracker* as the process-wide singleton.

    Background tasks retrieve it later through get_tracker().
    """
    global _tracker_instance
    _tracker_instance = tracker
|
|
|
|
|
|
2025-12-14 19:08:01 +01:00
|
|
|
|
|
|
|
|
class AccessTracker:
    """
    Track IP addresses and paths accessed.

    Records accesses, credential attempts, page-visit counts, and ban
    state through a lazily resolved DatabaseManager (SQLite), and serves
    aggregated statistics for the dashboard from that database.

    NOTE(review): an earlier docstring claimed in-memory structures for
    fast dashboard access; the visible code is database-backed only.
    """
|
2026-01-23 22:00:21 +01:00
|
|
|
|
|
|
|
|
    def __init__(
        self,
        max_pages_limit,
        ban_duration_seconds,
        db_manager: Optional[DatabaseManager] = None,
    ):
        """
        Initialize the access tracker.

        Args:
            max_pages_limit: Page-visit threshold forwarded to the database
                ban logic (see increment_page_visit).
            ban_duration_seconds: Base ban length in seconds forwarded to
                the database ban checks (see is_banned_ip / get_ban_info).
            db_manager: Optional DatabaseManager for persistence.
                If None, will use the global singleton.
        """
        self.max_pages_limit = max_pages_limit

        self.ban_duration_seconds = ban_duration_seconds

        # Load suspicious user-agent substrings from wordlists.
        wl = get_wordlists()

        self.suspicious_patterns = wl.suspicious_patterns

        # Fallback if wordlists not loaded: common crawler/scanner tool names.
        if not self.suspicious_patterns:

            self.suspicious_patterns = [

                "bot",

                "crawler",

                "spider",

                "scraper",

                "curl",

                "wget",

                "python-requests",

                "scanner",

                "nikto",

                "sqlmap",

                "nmap",

                "masscan",

                "nessus",

                "acunetix",

                "burp",

                "zap",

                "w3af",

                "metasploit",

                "nuclei",

                "gobuster",

                "dirbuster",

            ]

        # Load attack regex patterns (name -> regex) from wordlists.
        self.attack_types = wl.attack_patterns

        # Fallback if wordlists not loaded.
        if not self.attack_types:

            self.attack_types = {

                "path_traversal": r"\.\.",

                "sql_injection": r"('|--|;|\bOR\b|\bUNION\b|\bSELECT\b|\bDROP\b)",

                "xss_attempt": r"(<script|javascript:|onerror=|onload=)",

                "common_probes": r"(/admin|/backup|/config|/database|/private|/uploads|/wp-admin|/login|/phpMyAdmin|/phpmyadmin|/users|/search|/contact|/info|/input|/feedback|/server|/api/v1/|/api/v2/|/api/search|/api/sql|/api/database|\.env|/credentials\.txt|/passwords\.txt|\.git|/backup\.sql|/db_backup\.sql)",

                "command_injection": r"(\||;|`|\$\(|&&)",

            }

        # Database manager for persistence (lazily initialized via the
        # `db` property when None).
        self._db_manager = db_manager
|
|
|
|
|
|
|
|
|
|
@property
|
|
|
|
|
def db(self) -> Optional[DatabaseManager]:
|
|
|
|
|
"""
|
|
|
|
|
Get the database manager, lazily initializing if needed.
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
DatabaseManager instance or None if not available
|
|
|
|
|
"""
|
|
|
|
|
if self._db_manager is None:
|
|
|
|
|
try:
|
|
|
|
|
self._db_manager = get_database()
|
|
|
|
|
except Exception:
|
|
|
|
|
# Database not initialized, persistence disabled
|
|
|
|
|
pass
|
|
|
|
|
return self._db_manager
|
|
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
def parse_credentials(self, post_data: str) -> Tuple[str, str]:
|
|
|
|
|
"""
|
|
|
|
|
Parse username and password from POST data.
|
|
|
|
|
Returns tuple (username, password) or (None, None) if not found.
|
|
|
|
|
"""
|
|
|
|
|
if not post_data:
|
|
|
|
|
return None, None
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
username = None
|
|
|
|
|
password = None
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
try:
|
|
|
|
|
# Parse URL-encoded form data
|
|
|
|
|
parsed = urllib.parse.parse_qs(post_data)
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2026-02-08 16:02:18 +01:00
|
|
|
# Get credential field names from wordlists
|
|
|
|
|
wl = get_wordlists()
|
|
|
|
|
username_fields = wl.username_fields
|
|
|
|
|
password_fields = wl.password_fields
|
|
|
|
|
|
|
|
|
|
# Fallback if wordlists not loaded
|
|
|
|
|
if not username_fields:
|
|
|
|
|
username_fields = [
|
|
|
|
|
"username",
|
|
|
|
|
"user",
|
|
|
|
|
"login",
|
|
|
|
|
"email",
|
|
|
|
|
"log",
|
|
|
|
|
"userid",
|
|
|
|
|
"account",
|
|
|
|
|
]
|
|
|
|
|
if not password_fields:
|
|
|
|
|
password_fields = ["password", "pass", "passwd", "pwd", "passphrase"]
|
|
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
for field in username_fields:
|
|
|
|
|
if field in parsed and parsed[field]:
|
|
|
|
|
username = parsed[field][0]
|
|
|
|
|
break
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
for field in password_fields:
|
|
|
|
|
if field in parsed and parsed[field]:
|
|
|
|
|
password = parsed[field][0]
|
|
|
|
|
break
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
except Exception:
|
|
|
|
|
# If parsing fails, try simple regex patterns
|
2026-02-08 16:02:18 +01:00
|
|
|
wl = get_wordlists()
|
2026-02-15 15:10:41 +01:00
|
|
|
username_fields = wl.username_fields or [
|
|
|
|
|
"username",
|
|
|
|
|
"user",
|
|
|
|
|
"login",
|
|
|
|
|
"email",
|
|
|
|
|
"log",
|
|
|
|
|
]
|
|
|
|
|
password_fields = wl.password_fields or [
|
|
|
|
|
"password",
|
|
|
|
|
"pass",
|
|
|
|
|
"passwd",
|
|
|
|
|
"pwd",
|
|
|
|
|
]
|
|
|
|
|
|
2026-02-08 16:02:18 +01:00
|
|
|
# Build regex pattern from wordlist fields
|
|
|
|
|
username_pattern = "(?:" + "|".join(username_fields) + ")=([^&\\s]+)"
|
|
|
|
|
password_pattern = "(?:" + "|".join(password_fields) + ")=([^&\\s]+)"
|
2026-02-15 15:10:41 +01:00
|
|
|
|
2026-02-08 16:02:18 +01:00
|
|
|
username_match = re.search(username_pattern, post_data, re.IGNORECASE)
|
|
|
|
|
password_match = re.search(password_pattern, post_data, re.IGNORECASE)
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
if username_match:
|
|
|
|
|
username = urllib.parse.unquote_plus(username_match.group(1))
|
|
|
|
|
if password_match:
|
|
|
|
|
password = urllib.parse.unquote_plus(password_match.group(1))
|
2026-01-17 18:06:09 +01:00
|
|
|
|
2025-12-27 19:17:27 +01:00
|
|
|
return username, password
|
|
|
|
|
|
2026-01-23 22:00:21 +01:00
|
|
|
def record_credential_attempt(
|
|
|
|
|
self, ip: str, path: str, username: str, password: str
|
|
|
|
|
):
|
2025-12-28 10:43:32 -06:00
|
|
|
"""
|
|
|
|
|
Record a credential login attempt.
|
|
|
|
|
|
|
|
|
|
Stores in both in-memory list and SQLite database.
|
2026-01-24 23:28:10 +01:00
|
|
|
Skips recording if the IP is the server's own public IP.
|
2025-12-28 10:43:32 -06:00
|
|
|
"""
|
2026-01-24 23:28:10 +01:00
|
|
|
# Skip if this is the server's own IP
|
|
|
|
|
from config import get_config
|
2026-01-26 12:36:22 +01:00
|
|
|
|
2026-01-24 23:28:10 +01:00
|
|
|
config = get_config()
|
|
|
|
|
server_ip = config.get_server_ip()
|
|
|
|
|
if server_ip and ip == server_ip:
|
|
|
|
|
return
|
|
|
|
|
|
2025-12-28 10:43:32 -06:00
|
|
|
# Persist to database
|
|
|
|
|
if self.db:
|
|
|
|
|
try:
|
|
|
|
|
self.db.persist_credential(
|
2026-01-23 22:00:21 +01:00
|
|
|
ip=ip, path=path, username=username, password=password
|
2025-12-28 10:43:32 -06:00
|
|
|
)
|
|
|
|
|
except Exception:
|
|
|
|
|
# Don't crash if database persistence fails
|
|
|
|
|
pass
|
|
|
|
|
|
|
|
|
|
def record_access(
|
|
|
|
|
self,
|
|
|
|
|
ip: str,
|
|
|
|
|
path: str,
|
2026-01-23 22:00:21 +01:00
|
|
|
user_agent: str = "",
|
|
|
|
|
body: str = "",
|
|
|
|
|
method: str = "GET",
|
2026-02-08 16:02:18 +01:00
|
|
|
raw_request: str = "",
|
2025-12-28 10:43:32 -06:00
|
|
|
):
|
|
|
|
|
"""
|
|
|
|
|
Record an access attempt.
|
|
|
|
|
|
|
|
|
|
Stores in both in-memory structures and SQLite database.
|
2026-01-24 23:28:10 +01:00
|
|
|
Skips recording if the IP is the server's own public IP.
|
2025-12-28 10:43:32 -06:00
|
|
|
|
|
|
|
|
Args:
|
|
|
|
|
ip: Client IP address
|
|
|
|
|
path: Requested path
|
|
|
|
|
user_agent: Client user agent string
|
|
|
|
|
body: Request body (for POST/PUT)
|
|
|
|
|
method: HTTP method
|
2026-02-08 16:02:18 +01:00
|
|
|
raw_request: Full raw HTTP request for forensic analysis
|
2025-12-28 10:43:32 -06:00
|
|
|
"""
|
2026-01-24 23:28:10 +01:00
|
|
|
# Skip if this is the server's own IP
|
|
|
|
|
from config import get_config
|
2026-01-26 12:36:22 +01:00
|
|
|
|
2026-01-24 23:28:10 +01:00
|
|
|
config = get_config()
|
|
|
|
|
server_ip = config.get_server_ip()
|
|
|
|
|
if server_ip and ip == server_ip:
|
|
|
|
|
return
|
|
|
|
|
|
2025-12-28 10:43:32 -06:00
|
|
|
# Path attack type detection
|
2025-12-24 10:25:00 -06:00
|
|
|
attack_findings = self.detect_attack_type(path)
|
|
|
|
|
|
2025-12-28 10:43:32 -06:00
|
|
|
# POST/PUT body attack detection
|
2025-12-24 10:25:00 -06:00
|
|
|
if len(body) > 0:
|
2026-02-08 16:02:18 +01:00
|
|
|
# Decode URL-encoded body so patterns can match (e.g., %3Cscript%3E -> <script>)
|
|
|
|
|
decoded_body = urllib.parse.unquote(body)
|
|
|
|
|
attack_findings.extend(self.detect_attack_type(decoded_body))
|
2025-12-24 10:25:00 -06:00
|
|
|
|
2025-12-28 10:43:32 -06:00
|
|
|
is_suspicious = (
|
2026-01-23 22:00:21 +01:00
|
|
|
self.is_suspicious_user_agent(user_agent)
|
|
|
|
|
or self.is_honeypot_path(path)
|
|
|
|
|
or len(attack_findings) > 0
|
2025-12-28 10:43:32 -06:00
|
|
|
)
|
|
|
|
|
is_honeypot = self.is_honeypot_path(path)
|
2025-12-24 10:25:00 -06:00
|
|
|
|
2025-12-28 10:43:32 -06:00
|
|
|
# In-memory storage for dashboard
|
|
|
|
|
# Persist to database
|
|
|
|
|
if self.db:
|
|
|
|
|
try:
|
|
|
|
|
self.db.persist_access(
|
|
|
|
|
ip=ip,
|
|
|
|
|
path=path,
|
|
|
|
|
user_agent=user_agent,
|
|
|
|
|
method=method,
|
|
|
|
|
is_suspicious=is_suspicious,
|
|
|
|
|
is_honeypot_trigger=is_honeypot,
|
2026-01-23 22:00:21 +01:00
|
|
|
attack_types=attack_findings if attack_findings else None,
|
2026-02-08 16:02:18 +01:00
|
|
|
raw_request=raw_request if raw_request else None,
|
2025-12-28 10:43:32 -06:00
|
|
|
)
|
|
|
|
|
except Exception:
|
|
|
|
|
# Don't crash if database persistence fails
|
|
|
|
|
pass
|
|
|
|
|
|
2026-01-23 22:00:21 +01:00
|
|
|
def detect_attack_type(self, data: str) -> list[str]:
|
2025-12-24 10:25:00 -06:00
|
|
|
"""
|
|
|
|
|
Returns a list of all attack types found in path data
|
|
|
|
|
"""
|
|
|
|
|
findings = []
|
|
|
|
|
for name, pattern in self.attack_types.items():
|
|
|
|
|
if re.search(pattern, data, re.IGNORECASE):
|
|
|
|
|
findings.append(name)
|
|
|
|
|
return findings
|
|
|
|
|
|
2025-12-14 19:08:01 +01:00
|
|
|
def is_honeypot_path(self, path: str) -> bool:
|
|
|
|
|
"""Check if path is one of the honeypot traps from robots.txt"""
|
|
|
|
|
honeypot_paths = [
|
2026-01-23 22:00:21 +01:00
|
|
|
"/admin",
|
|
|
|
|
"/admin/",
|
|
|
|
|
"/backup",
|
|
|
|
|
"/backup/",
|
|
|
|
|
"/config",
|
|
|
|
|
"/config/",
|
|
|
|
|
"/private",
|
|
|
|
|
"/private/",
|
|
|
|
|
"/database",
|
|
|
|
|
"/database/",
|
|
|
|
|
"/credentials.txt",
|
|
|
|
|
"/passwords.txt",
|
|
|
|
|
"/admin_notes.txt",
|
|
|
|
|
"/api_keys.json",
|
|
|
|
|
"/.env",
|
|
|
|
|
"/wp-admin",
|
|
|
|
|
"/wp-admin/",
|
|
|
|
|
"/phpmyadmin",
|
|
|
|
|
"/phpMyAdmin/",
|
2025-12-14 19:08:01 +01:00
|
|
|
]
|
2026-01-23 22:00:21 +01:00
|
|
|
return path in honeypot_paths or any(
|
|
|
|
|
hp in path.lower()
|
|
|
|
|
for hp in [
|
|
|
|
|
"/backup",
|
|
|
|
|
"/admin",
|
|
|
|
|
"/config",
|
|
|
|
|
"/private",
|
|
|
|
|
"/database",
|
|
|
|
|
"phpmyadmin",
|
|
|
|
|
]
|
|
|
|
|
)
|
2025-12-14 19:08:01 +01:00
|
|
|
|
|
|
|
|
def is_suspicious_user_agent(self, user_agent: str) -> bool:
|
|
|
|
|
"""Check if user agent matches suspicious patterns"""
|
|
|
|
|
if not user_agent:
|
|
|
|
|
return True
|
|
|
|
|
ua_lower = user_agent.lower()
|
|
|
|
|
return any(pattern in ua_lower for pattern in self.suspicious_patterns)
|
|
|
|
|
|
2026-01-23 21:33:32 +01:00
|
|
|
    def get_category_by_ip(self, client_ip: str) -> str:
        """
        Look up the stored category for an IP from the IpStats table.

        NOTE(review): the original docstring claimed a True/False result
        for "good crawler" detection, but the code returns the
        lowercased/stripped category *string* when one is stored, and
        False when the IP is unknown, uncategorized, the database is
        unavailable, or an error occurs. The `-> str` annotation is
        therefore inaccurate (`str | bool` in practice); left as-is since
        callers' reliance on truthiness vs. the string is unknown.

        Args:
            client_ip: The client IP address (will be sanitized)

        Returns:
            The normalized category string, or False when unavailable.
        """
        try:
            from sanitizer import sanitize_ip

            # Sanitize the IP address before using it in a DB lookup.
            safe_ip = sanitize_ip(client_ip)

            # Query the database for this IP's category.
            db = self.db

            if not db:

                return False

            ip_stats = db.get_ip_stats_by_ip(safe_ip)

            if not ip_stats or not ip_stats.get("category"):

                return False

            # Normalize so callers can compare against e.g. "good crawler".
            category = ip_stats.get("category", "").lower().strip()

            return category

        except Exception as e:
            # Log but don't crash on database errors.
            import logging

            logging.error(f"Error checking IP category for {client_ip}: {str(e)}")

            return False
|
|
|
|
|
|
|
|
|
|
def increment_page_visit(self, client_ip: str) -> int:
|
|
|
|
|
"""
|
2026-02-22 16:23:52 +01:00
|
|
|
Increment page visit counter for an IP via DB and return the new count.
|
2026-01-23 22:00:21 +01:00
|
|
|
|
2026-01-23 21:33:32 +01:00
|
|
|
Args:
|
|
|
|
|
client_ip: The client IP address
|
2026-01-23 22:00:21 +01:00
|
|
|
|
2026-01-23 21:33:32 +01:00
|
|
|
Returns:
|
|
|
|
|
The updated page visit count for this IP
|
|
|
|
|
"""
|
2026-01-24 23:28:10 +01:00
|
|
|
from config import get_config
|
2026-01-26 12:36:22 +01:00
|
|
|
|
2026-01-24 23:28:10 +01:00
|
|
|
config = get_config()
|
|
|
|
|
server_ip = config.get_server_ip()
|
|
|
|
|
if server_ip and client_ip == server_ip:
|
|
|
|
|
return 0
|
|
|
|
|
|
2026-02-22 16:23:52 +01:00
|
|
|
if not self.db:
|
2026-01-23 21:33:32 +01:00
|
|
|
return 0
|
2026-01-23 22:00:21 +01:00
|
|
|
|
2026-02-22 16:23:52 +01:00
|
|
|
return self.db.increment_page_visit(client_ip, self.max_pages_limit)
|
|
|
|
|
|
2026-01-23 21:33:32 +01:00
|
|
|
def is_banned_ip(self, client_ip: str) -> bool:
|
|
|
|
|
"""
|
2026-02-22 16:23:52 +01:00
|
|
|
Check if an IP is currently banned.
|
2026-01-23 22:00:21 +01:00
|
|
|
|
2026-01-23 21:33:32 +01:00
|
|
|
Args:
|
|
|
|
|
client_ip: The client IP address
|
|
|
|
|
Returns:
|
|
|
|
|
True if the IP is banned, False otherwise
|
2026-01-23 22:00:21 +01:00
|
|
|
"""
|
2026-02-22 16:23:52 +01:00
|
|
|
if not self.db:
|
2026-01-24 23:28:10 +01:00
|
|
|
return False
|
2026-01-23 21:33:32 +01:00
|
|
|
|
2026-02-22 16:23:52 +01:00
|
|
|
return self.db.is_banned_ip(client_ip, self.ban_duration_seconds)
|
2026-01-23 21:33:32 +01:00
|
|
|
|
2026-01-24 23:28:10 +01:00
|
|
|
def get_ban_info(self, client_ip: str) -> dict:
|
|
|
|
|
"""
|
|
|
|
|
Get detailed ban information for an IP.
|
|
|
|
|
|
|
|
|
|
Returns:
|
|
|
|
|
Dictionary with ban status, violations, and remaining ban time
|
|
|
|
|
"""
|
2026-02-22 16:23:52 +01:00
|
|
|
if not self.db:
|
2026-01-24 23:28:10 +01:00
|
|
|
return {
|
|
|
|
|
"is_banned": False,
|
|
|
|
|
"violations": 0,
|
|
|
|
|
"ban_multiplier": 1,
|
|
|
|
|
"remaining_ban_seconds": 0,
|
|
|
|
|
}
|
2026-01-23 22:00:21 +01:00
|
|
|
|
2026-02-22 16:23:52 +01:00
|
|
|
return self.db.get_ban_info(client_ip, self.ban_duration_seconds)
|
2026-01-23 21:33:32 +01:00
|
|
|
|
2025-12-14 19:08:01 +01:00
|
|
|
def get_stats(self) -> Dict:
|
2025-12-28 13:52:46 -06:00
|
|
|
"""Get statistics summary from database."""
|
|
|
|
|
if not self.db:
|
|
|
|
|
raise RuntimeError("Database not available for dashboard stats")
|
|
|
|
|
|
|
|
|
|
# Get aggregate counts from database
|
|
|
|
|
stats = self.db.get_dashboard_counts()
|
|
|
|
|
|
|
|
|
|
# Add detailed lists from database
|
2026-01-23 22:00:21 +01:00
|
|
|
stats["top_ips"] = self.db.get_top_ips(10)
|
|
|
|
|
stats["top_paths"] = self.db.get_top_paths(10)
|
|
|
|
|
stats["top_user_agents"] = self.db.get_top_user_agents(10)
|
|
|
|
|
stats["recent_suspicious"] = self.db.get_recent_suspicious(20)
|
|
|
|
|
stats["honeypot_triggered_ips"] = self.db.get_honeypot_triggered_ips()
|
|
|
|
|
stats["attack_types"] = self.db.get_recent_attacks(20)
|
|
|
|
|
stats["credential_attempts"] = self.db.get_credential_attempts(limit=50)
|
2025-12-28 13:52:46 -06:00
|
|
|
|
|
|
|
|
return stats
|