parametrized into config.yaml + bug fix
This commit is contained in:
@@ -7,7 +7,7 @@ from pathlib import Path
|
||||
from datetime import datetime, timedelta
|
||||
import re
|
||||
from wordlists import get_wordlists
|
||||
|
||||
from config import get_config
|
||||
"""
|
||||
Functions for user activity analysis
|
||||
"""
|
||||
@@ -47,6 +47,17 @@ class Analyzer:
|
||||
|
||||
def infer_user_category(self, ip: str) -> str:
|
||||
|
||||
config = get_config()
|
||||
|
||||
http_risky_methods_threshold = config.http_risky_methods_threshold
|
||||
violated_robots_threshold = config.violated_robots_threshold
|
||||
uneven_request_timing_threshold = config.uneven_request_timing_threshold
|
||||
user_agents_used_threshold = config.user_agents_used_threshold
|
||||
attack_urls_threshold = config.attack_urls_threshold
|
||||
uneven_request_timing_time_window_seconds = config.uneven_request_timing_time_window_seconds
|
||||
|
||||
print(f"http_risky_methods_threshold: {http_risky_methods_threshold}")
|
||||
|
||||
score = {}
|
||||
score["attacker"] = {"risky_http_methods": False, "robots_violations": False, "uneven_request_timing": False, "different_user_agents": False, "attack_url": False}
|
||||
score["good_crawler"] = {"risky_http_methods": False, "robots_violations": False, "uneven_request_timing": False, "different_user_agents": False, "attack_url": False}
|
||||
@@ -104,14 +115,13 @@ class Analyzer:
|
||||
#print(f"TOTAL: {total_accesses_count} - GET: {get_accesses_count} - POST: {post_accesses_count}")
|
||||
|
||||
|
||||
#if >5% attacker or bad crawler
|
||||
if total_accesses_count > 0:
|
||||
if total_accesses_count > http_risky_methods_threshold:
|
||||
http_method_attacker_score = (post_accesses_count + put_accesses_count + delete_accesses_count + options_accesses_count + patch_accesses_count) / total_accesses_count
|
||||
else:
|
||||
http_method_attacker_score = 0
|
||||
|
||||
#print(f"HTTP Method attacker score: {http_method_attacker_score}")
|
||||
if http_method_attacker_score > 0.2:
|
||||
if http_method_attacker_score >= http_risky_methods_threshold:
|
||||
score["attacker"]["risky_http_methods"] = True
|
||||
score["good_crawler"]["risky_http_methods"] = False
|
||||
score["bad_crawler"]["risky_http_methods"] = True
|
||||
@@ -150,33 +160,28 @@ class Analyzer:
|
||||
else:
|
||||
violated_robots_ratio = 0
|
||||
|
||||
if violated_robots_ratio > 0.10:
|
||||
if violated_robots_ratio >= violated_robots_threshold:
|
||||
score["attacker"]["robots_violations"] = True
|
||||
score["good_crawler"]["robots_violations"] = False
|
||||
score["bad_crawler"]["robots_violations"] = True
|
||||
score["regular_user"]["robots_violations"] = False
|
||||
else:
|
||||
score["attacker"]["robots_violations"] = True
|
||||
score["attacker"]["robots_violations"] = False
|
||||
score["good_crawler"]["robots_violations"] = False
|
||||
score["bad_crawler"]["robots_violations"] = True
|
||||
score["bad_crawler"]["robots_violations"] = False
|
||||
score["regular_user"]["robots_violations"] = False
|
||||
|
||||
#--------------------- Requests Timing ---------------------
|
||||
#Request rate and timing: steady, throttled, polite vs attackers' bursty, aggressive, or oddly rhythmic behavior
|
||||
timestamps = [datetime.fromisoformat(item["timestamp"]) for item in accesses]
|
||||
print(f"Timestamps #: {len(timestamps)}")
|
||||
timestamps = [ts for ts in timestamps if datetime.utcnow() - ts <= timedelta(minutes=5)]
|
||||
print(f"Timestamps #: {len(timestamps)}")
|
||||
timestamps = [ts for ts in timestamps if datetime.utcnow() - ts <= timedelta(seconds=uneven_request_timing_time_window_seconds)]
|
||||
timestamps = sorted(timestamps, reverse=True)
|
||||
print(f"Timestamps #: {len(timestamps)}")
|
||||
|
||||
time_diffs = []
|
||||
for i in range(0, len(timestamps)-1):
|
||||
diff = (timestamps[i] - timestamps[i+1]).total_seconds()
|
||||
time_diffs.append(diff)
|
||||
|
||||
print(f"Time diffs: {time_diffs}")
|
||||
|
||||
mean = 0
|
||||
variance = 0
|
||||
std = 0
|
||||
@@ -186,17 +191,17 @@ class Analyzer:
|
||||
variance = sum((x - mean) ** 2 for x in time_diffs) / len(time_diffs)
|
||||
std = variance ** 0.5
|
||||
cv = std/mean
|
||||
print(f"Mean: {mean} - Variance {variance} - Standard Deviation {std} - Coefficient of Variation: {cv}")
|
||||
#print(f"Mean: {mean} - Variance {variance} - Standard Deviation {std} - Coefficient of Variation: {cv}")
|
||||
|
||||
if mean > 4:
|
||||
if mean >= uneven_request_timing_threshold:
|
||||
score["attacker"]["uneven_request_timing"] = True
|
||||
score["good_crawler"]["uneven_request_timing"] = False
|
||||
score["bad_crawler"]["uneven_request_timing"] = False
|
||||
score["regular_user"]["uneven_request_timing"] = True
|
||||
else:
|
||||
score["attacker"]["uneven_request_timing"] = True
|
||||
score["attacker"]["uneven_request_timing"] = False
|
||||
score["good_crawler"]["uneven_request_timing"] = False
|
||||
score["bad_crawler"]["uneven_request_timing"] = True
|
||||
score["bad_crawler"]["uneven_request_timing"] = False
|
||||
score["regular_user"]["uneven_request_timing"] = False
|
||||
|
||||
|
||||
@@ -206,39 +211,31 @@ class Analyzer:
|
||||
user_agents_used = list(dict.fromkeys(user_agents_used))
|
||||
#print(f"User agents used: {user_agents_used}")
|
||||
|
||||
if len(user_agents_used)> 4:
|
||||
if len(user_agents_used) >= user_agents_used_threshold:
|
||||
score["attacker"]["different_user_agents"] = True
|
||||
score["good_crawler"]["different_user_agents"] = False
|
||||
score["bad_crawler"]["different_user_agentss"] = True
|
||||
score["regular_user"]["different_user_agents"] = False
|
||||
else:
|
||||
score["attacker"]["different_user_agents"] = True
|
||||
score["attacker"]["different_user_agents"] = False
|
||||
score["good_crawler"]["different_user_agents"] = False
|
||||
score["bad_crawler"]["different_user_agents"] = True
|
||||
score["bad_crawler"]["different_user_agents"] = False
|
||||
score["regular_user"]["different_user_agents"] = False
|
||||
|
||||
#--------------------- Attack URLs ---------------------
|
||||
|
||||
attack_url_found = False
|
||||
# attack_types = {
|
||||
# 'path_traversal': r'\.\.',
|
||||
# 'sql_injection': r"('|--|;|\bOR\b|\bUNION\b|\bSELECT\b|\bDROP\b)",
|
||||
# 'xss_attempt': r'(<script|javascript:|onerror=|onload=)',
|
||||
# 'shell_injection': r'(\||;|`|\$\(|&&)'
|
||||
# }
|
||||
attack_urls_found_list = []
|
||||
|
||||
wl = get_wordlists()
|
||||
if wl.attack_urls:
|
||||
queried_paths = [item["path"] for item in accesses]
|
||||
|
||||
for queried_path in queried_paths:
|
||||
#print(f"QUERIED PATH: {queried_path}")
|
||||
for name, pattern in wl.attack_urls.items():
|
||||
#print(f"Pattern: {pattern}")
|
||||
if re.search(pattern, queried_path, re.IGNORECASE):
|
||||
attack_url_found = True
|
||||
attack_url_found_list.append(pattern)
|
||||
|
||||
if attack_url_found:
|
||||
if len(attack_urls_found_list) > attack_urls_threshold:
|
||||
score["attacker"]["attack_url"] = True
|
||||
score["good_crawler"]["attack_url"] = False
|
||||
score["bad_crawler"]["attack_url"] = False
|
||||
@@ -275,12 +272,12 @@ class Analyzer:
|
||||
regular_user_score = regular_user_score + score["regular_user"]["different_user_agents"] * weights["regular_user"]["different_user_agents"]
|
||||
regular_user_score = regular_user_score + score["regular_user"]["attack_url"] * weights["regular_user"]["attack_url"]
|
||||
|
||||
#print(f"Attacker score: {attacker_score}")
|
||||
#print(f"Good Crawler score: {good_crawler_score}")
|
||||
#print(f"Bad Crawler score: {bad_crawler_score}")
|
||||
#print(f"Regular User score: {regular_user_score}")
|
||||
print(f"Attacker score: {attacker_score}")
|
||||
print(f"Good Crawler score: {good_crawler_score}")
|
||||
print(f"Bad Crawler score: {bad_crawler_score}")
|
||||
print(f"Regular User score: {regular_user_score}")
|
||||
|
||||
analyzed_metrics = {"risky_http_methods": http_method_attacker_score, "robots_violations": violated_robots_ratio, "uneven_request_timing": mean, "different_user_agents": user_agents_used, "attack_url": attack_url_found}
|
||||
analyzed_metrics = {"risky_http_methods": http_method_attacker_score, "robots_violations": violated_robots_ratio, "uneven_request_timing": mean, "different_user_agents": user_agents_used, "attack_url": attack_urls_found_list}
|
||||
category_scores = {"attacker": attacker_score, "good_crawler": good_crawler_score, "bad_crawler": bad_crawler_score, "regular_user": regular_user_score}
|
||||
category = max(category_scores, key=category_scores.get)
|
||||
last_analysis = datetime.utcnow()
|
||||
|
||||
121
src/config.py
121
src/config.py
@@ -1,17 +1,22 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import os
|
||||
import sys
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Optional, Tuple
|
||||
from zoneinfo import ZoneInfo
|
||||
import time
|
||||
|
||||
import yaml
|
||||
|
||||
|
||||
@dataclass
|
||||
class Config:
|
||||
"""Configuration class for the deception server"""
|
||||
port: int = 5000
|
||||
delay: int = 100 # milliseconds
|
||||
server_header: str = ""
|
||||
links_length_range: Tuple[int, int] = (5, 15)
|
||||
links_per_page_range: Tuple[int, int] = (10, 15)
|
||||
char_space: str = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
|
||||
@@ -23,12 +28,20 @@ class Config:
|
||||
api_server_port: int = 8080
|
||||
api_server_path: str = "/api/v2/users"
|
||||
probability_error_codes: int = 0 # Percentage (0-100)
|
||||
server_header: Optional[str] = None
|
||||
|
||||
# Database settings
|
||||
database_path: str = "data/krawl.db"
|
||||
database_retention_days: int = 30
|
||||
timezone: str = None # IANA timezone (e.g., 'America/New_York', 'Europe/Rome')
|
||||
|
||||
|
||||
# Analyzer settings
|
||||
http_risky_methods_threshold: float = None
|
||||
violated_robots_threshold: float = None
|
||||
uneven_request_timing_threshold: float = None
|
||||
uneven_request_timing_time_window_seconds: float = None
|
||||
user_agents_used_threshold: float = None
|
||||
attack_urls_threshold: float = None
|
||||
|
||||
@staticmethod
|
||||
# Try to fetch timezone before if not set
|
||||
def get_system_timezone() -> str:
|
||||
@@ -38,16 +51,16 @@ class Config:
|
||||
tz_path = os.readlink('/etc/localtime')
|
||||
if 'zoneinfo/' in tz_path:
|
||||
return tz_path.split('zoneinfo/')[-1]
|
||||
|
||||
|
||||
local_tz = time.tzname[time.daylight]
|
||||
if local_tz and local_tz != 'UTC':
|
||||
return local_tz
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
# Default fallback to UTC
|
||||
return 'UTC'
|
||||
|
||||
|
||||
def get_timezone(self) -> ZoneInfo:
|
||||
"""Get configured timezone as ZoneInfo object"""
|
||||
if self.timezone:
|
||||
@@ -55,7 +68,7 @@ class Config:
|
||||
return ZoneInfo(self.timezone)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
|
||||
system_tz = self.get_system_timezone()
|
||||
try:
|
||||
return ZoneInfo(system_tz)
|
||||
@@ -63,31 +76,83 @@ class Config:
|
||||
return ZoneInfo('UTC')
|
||||
|
||||
@classmethod
|
||||
def from_env(cls) -> 'Config':
|
||||
"""Create configuration from environment variables"""
|
||||
def from_yaml(cls) -> 'Config':
|
||||
"""Create configuration from YAML file"""
|
||||
config_location = os.getenv('CONFIG_LOCATION', 'config.yaml')
|
||||
config_path = Path(__file__).parent.parent / config_location
|
||||
|
||||
try:
|
||||
with open(config_path, 'r') as f:
|
||||
data = yaml.safe_load(f)
|
||||
except FileNotFoundError:
|
||||
print(f"Error: Configuration file '{config_path}' not found.", file=sys.stderr)
|
||||
print(f"Please create a config.yaml file or set CONFIG_LOCATION environment variable.", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
except yaml.YAMLError as e:
|
||||
print(f"Error: Invalid YAML in configuration file '{config_path}': {e}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
if data is None:
|
||||
data = {}
|
||||
|
||||
# Extract nested values with defaults
|
||||
server = data.get('server', {})
|
||||
links = data.get('links', {})
|
||||
canary = data.get('canary', {})
|
||||
dashboard = data.get('dashboard', {})
|
||||
api = data.get('api', {})
|
||||
database = data.get('database', {})
|
||||
behavior = data.get('behavior', {})
|
||||
analyzer = data.get('analyzer', {})
|
||||
|
||||
# Handle dashboard_secret_path - auto-generate if null/not set
|
||||
dashboard_path = dashboard.get('secret_path')
|
||||
if dashboard_path is None:
|
||||
dashboard_path = f'/{os.urandom(16).hex()}'
|
||||
else:
|
||||
# ensure the dashboard path starts with a /
|
||||
if dashboard_path[:1] != "/":
|
||||
dashboard_path = f"/{dashboard_path}"
|
||||
|
||||
return cls(
|
||||
port=int(os.getenv('PORT', 5000)),
|
||||
delay=int(os.getenv('DELAY', 100)),
|
||||
port=server.get('port', 5000),
|
||||
delay=server.get('delay', 100),
|
||||
server_header=server.get('server_header',""),
|
||||
timezone=server.get('timezone'),
|
||||
links_length_range=(
|
||||
int(os.getenv('LINKS_MIN_LENGTH', 5)),
|
||||
int(os.getenv('LINKS_MAX_LENGTH', 15))
|
||||
links.get('min_length', 5),
|
||||
links.get('max_length', 15)
|
||||
),
|
||||
links_per_page_range=(
|
||||
int(os.getenv('LINKS_MIN_PER_PAGE', 10)),
|
||||
int(os.getenv('LINKS_MAX_PER_PAGE', 15))
|
||||
links.get('min_per_page', 10),
|
||||
links.get('max_per_page', 15)
|
||||
),
|
||||
char_space=os.getenv('CHAR_SPACE', 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'),
|
||||
max_counter=int(os.getenv('MAX_COUNTER', 10)),
|
||||
canary_token_url=os.getenv('CANARY_TOKEN_URL'),
|
||||
canary_token_tries=int(os.getenv('CANARY_TOKEN_TRIES', 10)),
|
||||
dashboard_secret_path=os.getenv('DASHBOARD_SECRET_PATH', f'/{os.urandom(16).hex()}'),
|
||||
api_server_url=os.getenv('API_SERVER_URL'),
|
||||
api_server_port=int(os.getenv('API_SERVER_PORT', 8080)),
|
||||
api_server_path=os.getenv('API_SERVER_PATH', '/api/v2/users'),
|
||||
probability_error_codes=int(os.getenv('PROBABILITY_ERROR_CODES', 0)),
|
||||
server_header=os.getenv('SERVER_HEADER'),
|
||||
database_path=os.getenv('DATABASE_PATH', 'data/krawl.db'),
|
||||
database_retention_days=int(os.getenv('DATABASE_RETENTION_DAYS', 30)),
|
||||
timezone=os.getenv('TIMEZONE') # If not set, will use system timezone
|
||||
|
||||
char_space=links.get('char_space', 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'),
|
||||
max_counter=links.get('max_counter', 10),
|
||||
canary_token_url=canary.get('token_url'),
|
||||
canary_token_tries=canary.get('token_tries', 10),
|
||||
dashboard_secret_path=dashboard_path,
|
||||
api_server_url=api.get('server_url'),
|
||||
api_server_port=api.get('server_port', 8080),
|
||||
api_server_path=api.get('server_path', '/api/v2/users'),
|
||||
probability_error_codes=behavior.get('probability_error_codes', 0),
|
||||
database_path=database.get('path', 'data/krawl.db'),
|
||||
database_retention_days=database.get('retention_days', 30),
|
||||
http_risky_methods_threshold=analyzer.get('http_risky_methods_threshold', 0.1),
|
||||
violated_robots_threshold=analyzer.get('violated_robots_threshold', 0.1),
|
||||
uneven_request_timing_threshold=analyzer.get('uneven_request_timing_threshold', 5),
|
||||
uneven_request_timing_time_window_seconds=analyzer.get('uneven_request_timing_time_window_seconds', 300),
|
||||
user_agents_used_threshold=analyzer.get('user_agents_used_threshold', 1),
|
||||
attack_urls_threshold=analyzer.get('attack_urls_threshold', 1)
|
||||
)
|
||||
|
||||
|
||||
_config_instance = None
|
||||
|
||||
|
||||
def get_config() -> Config:
|
||||
"""Get the singleton Config instance"""
|
||||
global _config_instance
|
||||
if _config_instance is None:
|
||||
_config_instance = Config.from_yaml()
|
||||
return _config_instance
|
||||
|
||||
@@ -9,8 +9,7 @@ import string
|
||||
import json
|
||||
from templates import html_templates
|
||||
from wordlists import get_wordlists
|
||||
from config import Config
|
||||
from logger import get_app_logger
|
||||
from config import get_config
|
||||
|
||||
def random_username() -> str:
|
||||
"""Generate random username"""
|
||||
@@ -38,15 +37,12 @@ def random_email(username: str = None) -> str:
|
||||
return f"{username}@{random.choice(wl.email_domains)}"
|
||||
|
||||
def random_server_header() -> str:
|
||||
"""Generate random server header"""
|
||||
|
||||
if Config.from_env().server_header:
|
||||
server_header = Config.from_env().server_header
|
||||
else:
|
||||
wl = get_wordlists()
|
||||
server_header = random.choice(wl.server_headers)
|
||||
|
||||
return server_header
|
||||
"""Generate random server header from wordlists"""
|
||||
config = get_config()
|
||||
if config.server_header:
|
||||
return config.server_header
|
||||
wl = get_wordlists()
|
||||
return random.choice(wl.server_headers)
|
||||
|
||||
def random_api_key() -> str:
|
||||
"""Generate random API key"""
|
||||
|
||||
154
src/handler.py
154
src/handler.py
@@ -6,6 +6,7 @@ import time
|
||||
from datetime import datetime
|
||||
from http.server import BaseHTTPRequestHandler
|
||||
from typing import Optional, List
|
||||
from urllib.parse import urlparse, parse_qs
|
||||
|
||||
from config import Config
|
||||
from tracker import AccessTracker
|
||||
@@ -17,6 +18,9 @@ from generators import (
|
||||
api_response, directory_listing, random_server_header
|
||||
)
|
||||
from wordlists import get_wordlists
|
||||
from sql_errors import generate_sql_error_response, get_sql_response_with_data
|
||||
from xss_detector import detect_xss_pattern, generate_xss_response
|
||||
from server_errors import generate_server_error
|
||||
|
||||
|
||||
class Handler(BaseHTTPRequestHandler):
|
||||
@@ -69,6 +73,67 @@ class Handler(BaseHTTPRequestHandler):
|
||||
if not error_codes:
|
||||
error_codes = [400, 401, 403, 404, 500, 502, 503]
|
||||
return random.choice(error_codes)
|
||||
|
||||
def _parse_query_string(self) -> str:
|
||||
"""Extract query string from the request path"""
|
||||
parsed = urlparse(self.path)
|
||||
return parsed.query
|
||||
|
||||
def _handle_sql_endpoint(self, path: str) -> bool:
|
||||
"""
|
||||
Handle SQL injection honeypot endpoints.
|
||||
Returns True if the path was handled, False otherwise.
|
||||
"""
|
||||
# SQL-vulnerable endpoints
|
||||
sql_endpoints = ['/api/search', '/api/sql', '/api/database']
|
||||
|
||||
base_path = urlparse(path).path
|
||||
if base_path not in sql_endpoints:
|
||||
return False
|
||||
|
||||
try:
|
||||
# Get query parameters
|
||||
query_string = self._parse_query_string()
|
||||
|
||||
# Log SQL injection attempt
|
||||
client_ip = self._get_client_ip()
|
||||
user_agent = self._get_user_agent()
|
||||
|
||||
# Always check for SQL injection patterns
|
||||
error_msg, content_type, status_code = generate_sql_error_response(query_string or "")
|
||||
|
||||
if error_msg:
|
||||
# SQL injection detected - log and return error
|
||||
self.access_logger.warning(f"[SQL INJECTION DETECTED] {client_ip} - {base_path} - Query: {query_string[:100] if query_string else 'empty'}")
|
||||
self.send_response(status_code)
|
||||
self.send_header('Content-type', content_type)
|
||||
self.end_headers()
|
||||
self.wfile.write(error_msg.encode())
|
||||
else:
|
||||
# No injection detected - return fake data
|
||||
self.access_logger.info(f"[SQL ENDPOINT] {client_ip} - {base_path} - Query: {query_string[:100] if query_string else 'empty'}")
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.end_headers()
|
||||
response_data = get_sql_response_with_data(base_path, query_string or "")
|
||||
self.wfile.write(response_data.encode())
|
||||
|
||||
return True
|
||||
|
||||
except BrokenPipeError:
|
||||
# Client disconnected
|
||||
return True
|
||||
except Exception as e:
|
||||
self.app_logger.error(f"Error handling SQL endpoint {path}: {str(e)}")
|
||||
# Still send a response even on error
|
||||
try:
|
||||
self.send_response(500)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.end_headers()
|
||||
self.wfile.write(b'{"error": "Internal server error"}')
|
||||
except:
|
||||
pass
|
||||
return True
|
||||
|
||||
def generate_page(self, seed: str) -> str:
|
||||
"""Generate a webpage containing random links or canary token"""
|
||||
@@ -209,6 +274,68 @@ class Handler(BaseHTTPRequestHandler):
|
||||
user_agent = self._get_user_agent()
|
||||
post_data = ""
|
||||
|
||||
from urllib.parse import urlparse
|
||||
base_path = urlparse(self.path).path
|
||||
|
||||
if base_path in ['/api/search', '/api/sql', '/api/database']:
|
||||
content_length = int(self.headers.get('Content-Length', 0))
|
||||
if content_length > 0:
|
||||
post_data = self.rfile.read(content_length).decode('utf-8', errors="replace")
|
||||
|
||||
self.access_logger.info(f"[SQL ENDPOINT POST] {client_ip} - {base_path} - Data: {post_data[:100] if post_data else 'empty'}")
|
||||
|
||||
error_msg, content_type, status_code = generate_sql_error_response(post_data)
|
||||
|
||||
try:
|
||||
if error_msg:
|
||||
self.access_logger.warning(f"[SQL INJECTION DETECTED POST] {client_ip} - {base_path}")
|
||||
self.send_response(status_code)
|
||||
self.send_header('Content-type', content_type)
|
||||
self.end_headers()
|
||||
self.wfile.write(error_msg.encode())
|
||||
else:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'application/json')
|
||||
self.end_headers()
|
||||
response_data = get_sql_response_with_data(base_path, post_data)
|
||||
self.wfile.write(response_data.encode())
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
except Exception as e:
|
||||
self.app_logger.error(f"Error in SQL POST handler: {str(e)}")
|
||||
return
|
||||
|
||||
if base_path == '/api/contact':
|
||||
content_length = int(self.headers.get('Content-Length', 0))
|
||||
if content_length > 0:
|
||||
post_data = self.rfile.read(content_length).decode('utf-8', errors="replace")
|
||||
|
||||
parsed_data = {}
|
||||
for pair in post_data.split('&'):
|
||||
if '=' in pair:
|
||||
key, value = pair.split('=', 1)
|
||||
from urllib.parse import unquote_plus
|
||||
parsed_data[unquote_plus(key)] = unquote_plus(value)
|
||||
|
||||
xss_detected = any(detect_xss_pattern(v) for v in parsed_data.values())
|
||||
|
||||
if xss_detected:
|
||||
self.access_logger.warning(f"[XSS ATTEMPT DETECTED] {client_ip} - {base_path} - Data: {post_data[:200]}")
|
||||
else:
|
||||
self.access_logger.info(f"[XSS ENDPOINT POST] {client_ip} - {base_path}")
|
||||
|
||||
try:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'text/html')
|
||||
self.end_headers()
|
||||
response_html = generate_xss_response(parsed_data)
|
||||
self.wfile.write(response_html.encode())
|
||||
except BrokenPipeError:
|
||||
pass
|
||||
except Exception as e:
|
||||
self.app_logger.error(f"Error in XSS POST handler: {str(e)}")
|
||||
return
|
||||
|
||||
self.access_logger.warning(f"[LOGIN ATTEMPT] {client_ip} - {self.path} - {user_agent[:50]}")
|
||||
|
||||
content_length = int(self.headers.get('Content-Length', 0))
|
||||
@@ -250,6 +377,10 @@ class Handler(BaseHTTPRequestHandler):
|
||||
def serve_special_path(self, path: str) -> bool:
|
||||
"""Serve special paths like robots.txt, API endpoints, etc."""
|
||||
|
||||
# Check SQL injection honeypot endpoints first
|
||||
if self._handle_sql_endpoint(path):
|
||||
return True
|
||||
|
||||
try:
|
||||
if path == '/robots.txt':
|
||||
self.send_response(200)
|
||||
@@ -287,7 +418,28 @@ class Handler(BaseHTTPRequestHandler):
|
||||
self.wfile.write(html_templates.login_form().encode())
|
||||
return True
|
||||
|
||||
# WordPress login page
|
||||
if path in ['/users', '/user', '/database', '/db', '/search']:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'text/html')
|
||||
self.end_headers()
|
||||
self.wfile.write(html_templates.product_search().encode())
|
||||
return True
|
||||
|
||||
if path in ['/info', '/input', '/contact', '/feedback', '/comment']:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'text/html')
|
||||
self.end_headers()
|
||||
self.wfile.write(html_templates.input_form().encode())
|
||||
return True
|
||||
|
||||
if path == '/server':
|
||||
error_html, content_type = generate_server_error()
|
||||
self.send_response(500)
|
||||
self.send_header('Content-type', content_type)
|
||||
self.end_headers()
|
||||
self.wfile.write(error_html.encode())
|
||||
return True
|
||||
|
||||
if path in ['/wp-login.php', '/wp-login', '/wp-admin', '/wp-admin/']:
|
||||
self.send_response(200)
|
||||
self.send_header('Content-type', 'text/html')
|
||||
|
||||
@@ -8,7 +8,7 @@ Run this file to start the server.
|
||||
import sys
|
||||
from http.server import HTTPServer
|
||||
|
||||
from config import Config
|
||||
from config import get_config
|
||||
from tracker import AccessTracker
|
||||
from analyzer import Analyzer
|
||||
from handler import Handler
|
||||
@@ -21,24 +21,29 @@ def print_usage():
|
||||
print(f'Usage: {sys.argv[0]} [FILE]\n')
|
||||
print('FILE is file containing a list of webpage names to serve, one per line.')
|
||||
print('If no file is provided, random links will be generated.\n')
|
||||
print('Environment Variables:')
|
||||
print(' PORT - Server port (default: 5000)')
|
||||
print(' DELAY - Response delay in ms (default: 100)')
|
||||
print(' LINKS_MIN_LENGTH - Min link length (default: 5)')
|
||||
print(' LINKS_MAX_LENGTH - Max link length (default: 15)')
|
||||
print(' LINKS_MIN_PER_PAGE - Min links per page (default: 10)')
|
||||
print(' LINKS_MAX_PER_PAGE - Max links per page (default: 15)')
|
||||
print(' MAX_COUNTER - Max counter value (default: 10)')
|
||||
print(' CANARY_TOKEN_URL - Canary token URL to display')
|
||||
print(' CANARY_TOKEN_TRIES - Number of tries before showing token (default: 10)')
|
||||
print(' DASHBOARD_SECRET_PATH - Secret path for dashboard (auto-generated if not set)')
|
||||
print(' PROBABILITY_ERROR_CODES - Probability (0-100) to return HTTP error codes (default: 0)')
|
||||
print(' CHAR_SPACE - Characters for random links')
|
||||
print(' SERVER_HEADER - HTTP Server header for deception (default: Apache/2.2.22 (Ubuntu))')
|
||||
print(' DATABASE_PATH - Path to SQLite database (default: data/krawl.db)')
|
||||
print(' DATABASE_RETENTION_DAYS - Days to retain database records (default: 30)')
|
||||
print(' TIMEZONE - IANA timezone for logs/dashboard (e.g., America/New_York, Europe/Rome)')
|
||||
print(' If not set, system timezone will be used')
|
||||
print('Configuration:')
|
||||
print(' Configuration is loaded from a YAML file (default: config.yaml)')
|
||||
print(' Set CONFIG_LOCATION environment variable to use a different file.\n')
|
||||
print(' Example config.yaml structure:')
|
||||
print(' server:')
|
||||
print(' port: 5000')
|
||||
print(' delay: 100')
|
||||
print(' timezone: null # or "America/New_York"')
|
||||
print(' links:')
|
||||
print(' min_length: 5')
|
||||
print(' max_length: 15')
|
||||
print(' min_per_page: 10')
|
||||
print(' max_per_page: 15')
|
||||
print(' canary:')
|
||||
print(' token_url: null')
|
||||
print(' token_tries: 10')
|
||||
print(' dashboard:')
|
||||
print(' secret_path: null # auto-generated if not set')
|
||||
print(' database:')
|
||||
print(' path: "data/krawl.db"')
|
||||
print(' retention_days: 30')
|
||||
print(' behavior:')
|
||||
print(' probability_error_codes: 0')
|
||||
|
||||
|
||||
def main():
|
||||
@@ -47,19 +52,17 @@ def main():
|
||||
print_usage()
|
||||
exit(0)
|
||||
|
||||
config = Config.from_env()
|
||||
|
||||
config = get_config()
|
||||
|
||||
# Get timezone configuration
|
||||
tz = config.get_timezone()
|
||||
|
||||
|
||||
# Initialize logging with timezone
|
||||
initialize_logging(timezone=tz)
|
||||
app_logger = get_app_logger()
|
||||
access_logger = get_access_logger()
|
||||
credential_logger = get_credential_logger()
|
||||
|
||||
config = Config.from_env()
|
||||
|
||||
# Initialize database for persistent storage
|
||||
try:
|
||||
initialize_database(config.database_path)
|
||||
|
||||
65
src/server_errors.py
Normal file
65
src/server_errors.py
Normal file
@@ -0,0 +1,65 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import random
|
||||
from wordlists import get_wordlists
|
||||
|
||||
|
||||
def generate_server_error() -> tuple[str, str]:
|
||||
wl = get_wordlists()
|
||||
server_errors = wl.server_errors
|
||||
|
||||
if not server_errors:
|
||||
return ("500 Internal Server Error", "text/html")
|
||||
|
||||
server_type = random.choice(list(server_errors.keys()))
|
||||
server_config = server_errors[server_type]
|
||||
|
||||
error_codes = {
|
||||
400: "Bad Request",
|
||||
401: "Unauthorized",
|
||||
403: "Forbidden",
|
||||
404: "Not Found",
|
||||
500: "Internal Server Error",
|
||||
502: "Bad Gateway",
|
||||
503: "Service Unavailable"
|
||||
}
|
||||
|
||||
code = random.choice(list(error_codes.keys()))
|
||||
message = error_codes[code]
|
||||
|
||||
template = server_config.get('template', '')
|
||||
version = random.choice(server_config.get('versions', ['1.0']))
|
||||
|
||||
html = template.replace('{code}', str(code))
|
||||
html = html.replace('{message}', message)
|
||||
html = html.replace('{version}', version)
|
||||
|
||||
if server_type == 'apache':
|
||||
os = random.choice(server_config.get('os', ['Ubuntu']))
|
||||
html = html.replace('{os}', os)
|
||||
html = html.replace('{host}', 'localhost')
|
||||
|
||||
return (html, "text/html")
|
||||
|
||||
|
||||
def get_server_header(server_type: str = None) -> str:
|
||||
wl = get_wordlists()
|
||||
server_errors = wl.server_errors
|
||||
|
||||
if not server_errors:
|
||||
return "nginx/1.18.0"
|
||||
|
||||
if not server_type:
|
||||
server_type = random.choice(list(server_errors.keys()))
|
||||
|
||||
server_config = server_errors.get(server_type, {})
|
||||
version = random.choice(server_config.get('versions', ['1.0']))
|
||||
|
||||
server_headers = {
|
||||
'nginx': f"nginx/{version}",
|
||||
'apache': f"Apache/{version}",
|
||||
'iis': f"Microsoft-IIS/{version}",
|
||||
'tomcat': f"Apache-Coyote/1.1"
|
||||
}
|
||||
|
||||
return server_headers.get(server_type, "nginx/1.18.0")
|
||||
112
src/sql_errors.py
Normal file
112
src/sql_errors.py
Normal file
@@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import random
|
||||
import re
|
||||
from typing import Optional, Tuple
|
||||
from wordlists import get_wordlists
|
||||
|
||||
|
||||
def detect_sql_injection_pattern(query_string: str) -> Optional[str]:
|
||||
if not query_string:
|
||||
return None
|
||||
|
||||
query_lower = query_string.lower()
|
||||
|
||||
patterns = {
|
||||
'quote': [r"'", r'"', r'`'],
|
||||
'comment': [r'--', r'#', r'/\*', r'\*/'],
|
||||
'union': [r'\bunion\b', r'\bunion\s+select\b'],
|
||||
'boolean': [r'\bor\b.*=.*', r'\band\b.*=.*', r"'.*or.*'.*=.*'"],
|
||||
'time_based': [r'\bsleep\b', r'\bwaitfor\b', r'\bdelay\b', r'\bbenchmark\b'],
|
||||
'stacked': [r';.*select', r';.*drop', r';.*insert', r';.*update', r';.*delete'],
|
||||
'command': [r'\bexec\b', r'\bexecute\b', r'\bxp_cmdshell\b'],
|
||||
'info_schema': [r'information_schema', r'table_schema', r'table_name'],
|
||||
}
|
||||
|
||||
for injection_type, pattern_list in patterns.items():
|
||||
for pattern in pattern_list:
|
||||
if re.search(pattern, query_lower):
|
||||
return injection_type
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def get_random_sql_error(db_type: str = None, injection_type: str = None) -> Tuple[str, str]:
    """Pick a random fake SQL error message for the given DB/injection type.

    Returns a ``(message, content_type)`` pair; the content type is always
    ``"text/plain"``.  ``{table}`` / ``{column}`` placeholders in the chosen
    message are filled with random identifiers so the error looks authentic.
    """
    wl = get_wordlists()
    sql_errors = wl.sql_errors

    # No wordlist data at all -- generic fallback.
    if not sql_errors:
        return ("Database error occurred", "text/plain")

    if not db_type:
        db_type = random.choice(list(sql_errors))

    db_errors = sql_errors.get(db_type, {})

    # Preference order: exact injection type, then 'generic', then anything.
    if injection_type and injection_type in db_errors:
        candidates = db_errors[injection_type]
    elif 'generic' in db_errors:
        candidates = db_errors['generic']
    else:
        candidates = [err
                      for value in db_errors.values()
                      if isinstance(value, list)
                      for err in value]
        if not candidates:
            candidates = ["Database error occurred"]

    message = random.choice(candidates) if candidates else "Database error occurred"

    if '{table}' in message:
        tables = ['users', 'products', 'orders', 'customers', 'accounts', 'sessions']
        message = message.replace('{table}', random.choice(tables))

    if '{column}' in message:
        columns = ['id', 'name', 'email', 'password', 'username', 'created_at']
        message = message.replace('{column}', random.choice(columns))

    return (message, "text/plain")
|
||||
|
||||
|
||||
def generate_sql_error_response(query_string: str, db_type: str = None) -> Tuple[Optional[str], Optional[str], Optional[int]]:
    """Build a fake SQL error response when the query looks like an injection.

    Args:
        query_string: Raw query string from the incoming request.
        db_type: Optional database flavor to draw error messages from.

    Returns:
        ``(error_message, content_type, status_code)`` when an injection
        pattern is detected, ``(None, None, None)`` otherwise -- the old
        ``Tuple[str, str, int]`` annotation was wrong about that clean-input
        case.  The status code is usually 500, but 200 roughly 30% of the
        time to mimic apps that leak errors inside successful responses.
    """
    injection_type = detect_sql_injection_pattern(query_string)

    # Not an injection attempt -- caller should fall through to normal handling.
    if not injection_type:
        return (None, None, None)

    error_message, content_type = get_random_sql_error(db_type, injection_type)

    status_code = 500

    # Occasionally answer 200 so scanners also see "in-band" error leakage.
    if random.random() < 0.3:
        status_code = 200

    return (error_message, content_type, status_code)
|
||||
|
||||
|
||||
def get_sql_response_with_data(path: str, params: str) -> str:
    """Return a fake JSON result set for SQL-injection probes.

    Data-extraction style injections (union/boolean/stacked) receive a
    small payload of invented user rows; any other input gets an empty,
    "successful" result set.
    """
    import json
    from generators import random_username, random_email, random_password

    injection_type = detect_sql_injection_pattern(params)

    if injection_type in ['union', 'boolean', 'stacked']:
        # Fabricate 1-3 fake "leaked" account rows.
        rows = []
        for row_id in range(1, random.randint(2, 5)):
            rows.append({
                "id": row_id,
                "username": random_username(),
                "email": random_email(),
                "password_hash": random_password(),
                "role": random.choice(["admin", "user", "moderator"]),
            })
        return json.dumps({"success": True, "results": rows}, indent=2)

    return json.dumps({
        "success": True,
        "message": "Query executed successfully",
        "results": []
    }, indent=2)
|
||||
66
src/templates/html/generic_search.html
Normal file
66
src/templates/html/generic_search.html
Normal file
@@ -0,0 +1,66 @@
|
||||
<!DOCTYPE html>
<!-- Honeypot search page: submits the raw query to /api/search and reflects
     the raw response body into the page. -->
<html>
<head>
    <title>Search</title>
    <style>
        body {
            font-family: Arial, sans-serif;
            max-width: 600px;
            margin: 50px auto;
            padding: 20px;
        }
        h1 {
            color: #333;
        }
        input {
            width: 100%;
            padding: 8px;
            margin: 10px 0;
            box-sizing: border-box;
        }
        button {
            background: #4CAF50;
            color: white;
            padding: 10px 20px;
            border: none;
            cursor: pointer;
        }
        button:hover {
            background: #45a049;
        }
        #results {
            margin-top: 20px;
            padding: 10px;
            border: 1px solid #ddd;
            background: #f9f9f9;
            display: none;
        }
    </style>
</head>
<body>
    <h1>Search</h1>
    <form id="searchForm">
        <input type="text" id="searchQuery" placeholder="Enter search query..." required>
        <button type="submit">Search</button>
    </form>
    <div id="results"></div>

    <script>
        // Send the query to the API and show whatever comes back.
        document.getElementById('searchForm').addEventListener('submit', async (e) => {
            e.preventDefault();
            const query = document.getElementById('searchQuery').value;
            const results = document.getElementById('results');

            try {
                const response = await fetch(`/api/search?q=${encodeURIComponent(query)}`);
                const text = await response.text();
                // NOTE(review): deliberate unescaped innerHTML reflection --
                // presumably part of the honeypot design; confirm intended.
                results.innerHTML = `<pre>${text}</pre>`;
                results.style.display = 'block';
            } catch (err) {
                results.innerHTML = `<p>Error: ${err.message}</p>`;
                results.style.display = 'block';
            }
        });
    </script>
</body>
</html>
|
||||
74
src/templates/html/input_form.html
Normal file
74
src/templates/html/input_form.html
Normal file
@@ -0,0 +1,74 @@
|
||||
<!DOCTYPE html>
<!-- Honeypot contact form: POSTs form fields to /api/contact and reflects
     the raw response HTML into the page. -->
<html>
<head>
    <title>Contact</title>
    <style>
        body {
            font-family: Arial, sans-serif;
            max-width: 500px;
            margin: 50px auto;
            padding: 20px;
        }
        h1 {
            color: #333;
        }
        input, textarea {
            width: 100%;
            padding: 8px;
            margin: 10px 0;
            border: 1px solid #ddd;
            box-sizing: border-box;
        }
        textarea {
            min-height: 100px;
        }
        button {
            background: #4CAF50;
            color: white;
            padding: 10px 20px;
            border: none;
            cursor: pointer;
        }
        button:hover {
            background: #45a049;
        }
        #response {
            margin-top: 20px;
            padding: 10px;
            display: none;
        }
    </style>
</head>
<body>
    <h1>Contact</h1>
    <form id="contactForm">
        <input type="text" name="name" placeholder="Name" required>
        <input type="email" name="email" placeholder="Email" required>
        <textarea name="message" placeholder="Message" required></textarea>
        <button type="submit">Submit</button>
    </form>
    <div id="response"></div>

    <script>
        // Submit as application/x-www-form-urlencoded and reflect the reply.
        document.getElementById('contactForm').addEventListener('submit', function(e) {
            e.preventDefault();
            const formData = new FormData(this);

            fetch('/api/contact', {
                method: 'POST',
                headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
                body: new URLSearchParams(formData)
            })
            .then(response => response.text())
            .then(text => {
                // NOTE(review): response HTML is injected unescaped --
                // presumably the honeypot reflection point; confirm intended.
                document.getElementById('response').innerHTML = text;
                document.getElementById('response').style.display = 'block';
            })
            .catch(error => {
                document.getElementById('response').innerHTML = 'Error: ' + error.message;
                document.getElementById('response').style.display = 'block';
            });
        });
    </script>
</body>
</html>
|
||||
@@ -11,8 +11,18 @@ Disallow: /login/
|
||||
Disallow: /admin/login
Disallow: /phpMyAdmin/
Disallow: /admin/login.php
Disallow: /users
Disallow: /search
Disallow: /contact
Disallow: /info
Disallow: /input
Disallow: /feedback
Disallow: /server
Disallow: /api/v1/users
Disallow: /api/v2/secrets
Disallow: /api/search
Disallow: /api/sql
Disallow: /api/database
Disallow: /.env
Disallow: /credentials.txt
Disallow: /passwords.txt
|
||||
|
||||
@@ -50,3 +50,13 @@ def directory_listing(path: str, dirs: list, files: list) -> str:
|
||||
rows += row_template.format(href=f, name=f, date="2024-12-01 14:22", size=size)
|
||||
|
||||
return load_template("directory_listing", path=path, rows=rows)
|
||||
|
||||
|
||||
def product_search() -> str:
    """Render the product-search page that serves as the SQL-injection honeypot."""
    template_name = "generic_search"
    return load_template(template_name)
|
||||
|
||||
|
||||
def input_form() -> str:
    """Render the contact/input form page that serves as the XSS honeypot."""
    template_name = "input_form"
    return load_template(template_name)
|
||||
|
||||
@@ -6,7 +6,7 @@ from datetime import datetime
|
||||
from zoneinfo import ZoneInfo
|
||||
import re
|
||||
import urllib.parse
|
||||
|
||||
from wordlists import get_wordlists
|
||||
from database import get_database, DatabaseManager
|
||||
|
||||
|
||||
@@ -37,14 +37,19 @@ class AccessTracker:
|
||||
'burp', 'zap', 'w3af', 'metasploit', 'nuclei', 'gobuster', 'dirbuster'
|
||||
]
|
||||
|
||||
# Common attack types such as xss, shell injection, probes
|
||||
self.attack_types = {
|
||||
'path_traversal': r'\.\.',
|
||||
'sql_injection': r"('|--|;|\bOR\b|\bUNION\b|\bSELECT\b|\bDROP\b)",
|
||||
'xss_attempt': r'(<script|javascript:|onerror=|onload=)',
|
||||
'common_probes': r'(wp-admin|phpmyadmin|\.env|\.git|/admin|/config)',
|
||||
'shell_injection': r'(\||;|`|\$\(|&&)',
|
||||
}
|
||||
# Load attack patterns from wordlists
|
||||
wl = get_wordlists()
|
||||
self.attack_types = wl.attack_patterns
|
||||
|
||||
# Fallback if wordlists not loaded
|
||||
if not self.attack_types:
|
||||
self.attack_types = {
|
||||
'path_traversal': r'\.\.',
|
||||
'sql_injection': r"('|--|;|\bOR\b|\bUNION\b|\bSELECT\b|\bDROP\b)",
|
||||
'xss_attempt': r'(<script|javascript:|onerror=|onload=)',
|
||||
'common_probes': r'(wp-admin|phpmyadmin|\.env|\.git|/admin|/config)',
|
||||
'shell_injection': r'(\||;|`|\$\(|&&)',
|
||||
}
|
||||
|
||||
# Track IPs that accessed honeypot paths from robots.txt
|
||||
self.honeypot_triggered: Dict[str, List[str]] = defaultdict(list)
|
||||
|
||||
@@ -114,6 +114,17 @@ class Wordlists:
|
||||
return self._data.get("error_codes", [])
|
||||
|
||||
@property
def sql_errors(self):
    """Fake SQL error templates from the wordlist data (empty dict if absent)."""
    data = self._data
    return data.get("sql_errors", {})
|
||||
|
||||
@property
def attack_patterns(self):
    """Attack-detection regexes keyed by attack type (empty dict if absent)."""
    data = self._data
    return data.get("attack_patterns", {})
|
||||
|
||||
@property
def server_errors(self):
    """Server banner/error wordlist data (empty dict if absent)."""
    data = self._data
    return data.get("server_errors", {})
|
||||
|
||||
# NOTE(review): unlike the sibling accessors, this is a plain method, not a
# @property -- callers must invoke it as ``wl.server_headers()``.  Confirm
# the missing decorator is intentional before "fixing" it.
def server_headers(self):
    """Return the ``server_headers`` wordlist entry (empty list if absent)."""
    return self._data.get("server_headers", [])
|
||||
|
||||
|
||||
73
src/xss_detector.py
Normal file
73
src/xss_detector.py
Normal file
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python3
|
||||
|
||||
import re
|
||||
from typing import Optional
|
||||
from wordlists import get_wordlists
|
||||
|
||||
|
||||
def detect_xss_pattern(input_string: str) -> bool:
    """Return True when *input_string* contains an XSS-style payload marker."""
    if not input_string:
        return False

    pattern = get_wordlists().attack_patterns.get('xss_attempt', '')

    # Built-in fallback when the wordlists define no XSS pattern.
    if not pattern:
        pattern = r'(<script|</script|javascript:|onerror=|onload=|onclick=|<iframe|<img|<svg|eval\(|alert\()'

    return re.search(pattern, input_string, re.IGNORECASE) is not None
|
||||
|
||||
|
||||
def generate_xss_response(input_data: dict) -> str:
    """Build an HTML "thank you" page for a form submission.

    Fields whose values match an XSS pattern are reflected back verbatim
    (unescaped) -- that is the honeypot: the payload appears to land,
    encouraging the attacker to continue.  Clean submissions get a generic
    confirmation page with nothing reflected.

    Args:
        input_data: Mapping of form field names to submitted values.
            Values are assumed to be strings, since detection runs a
            regex over them -- TODO confirm callers never pass non-str.

    Returns:
        A complete HTML document as a string.
    """
    xss_detected = False
    reflected_content = []

    for key, value in input_data.items():
        if detect_xss_pattern(value):
            xss_detected = True
            # Deliberately unescaped reflection -- the point of the honeypot.
            reflected_content.append(f"<p><strong>{key}:</strong> {value}</p>")

    if xss_detected:
        # Reflected variant: echoes the attacker-supplied fields.
        # Double braces ({{ }}) are literal CSS braces inside the f-string.
        html = f"""
        <!DOCTYPE html>
        <html>
        <head>
            <title>Submission Received</title>
            <style>
                body {{ font-family: Arial, sans-serif; max-width: 600px; margin: 50px auto; padding: 20px; }}
                .success {{ background: #d4edda; padding: 20px; border-radius: 8px; border: 1px solid #c3e6cb; }}
                h2 {{ color: #155724; }}
                p {{ margin: 10px 0; }}
            </style>
        </head>
        <body>
            <div class="success">
                <h2>Thank you for your submission!</h2>
                <p>We have received your information:</p>
                {''.join(reflected_content)}
                <p><em>We will get back to you shortly.</em></p>
            </div>
        </body>
        </html>
        """
        return html

    # Clean submission: generic confirmation, nothing reflected.
    return """
    <!DOCTYPE html>
    <html>
    <head>
        <title>Submission Received</title>
        <style>
            body { font-family: Arial, sans-serif; max-width: 600px; margin: 50px auto; padding: 20px; }
            .success { background: #d4edda; padding: 20px; border-radius: 8px; border: 1px solid #c3e6cb; }
            h2 { color: #155724; }
        </style>
    </head>
    <body>
        <div class="success">
            <h2>Thank you for your submission!</h2>
            <p>Your message has been received and we will respond soon.</p>
        </div>
    </body>
    </html>
    """
|
||||
Reference in New Issue
Block a user