fix: SQLite database locked errors + add error status for 4xx/5xx
SQLite locking:
- Enable WAL journal mode in init_db (readers don't block writers)
- Set busy_timeout=30000ms in init_db
- Add timeout=30 to every aiosqlite.connect() across db.py, validator.py, enricher.py, main.py so connections wait up to 30s instead of crashing

Error status:
- 4xx/5xx HTTP responses are now prescreen_status='error' (server alive but broken/blocking) instead of 'live'
- Added 'error' counter to validator stats and orange Error stat box in UI
- Added ps-error CSS class (orange) and filter option in Browse tab

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
23
app/db.py
23
app/db.py
@@ -101,7 +101,10 @@ _total_cache: int = 0
|
||||
|
||||
|
||||
async def init_db():
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
# WAL mode: concurrent reads don't block on writes; write lock held briefly
|
||||
await db.execute("PRAGMA journal_mode=WAL")
|
||||
await db.execute("PRAGMA busy_timeout=30000")
|
||||
await db.executescript(SCHEMA)
|
||||
# Run migrations (safe to re-run — silently skips existing columns)
|
||||
for sql in _MIGRATIONS:
|
||||
@@ -237,7 +240,7 @@ async def get_domains(tld=None, page=1, limit=100, alpha_only=False, no_sld=Fals
|
||||
return total, []
|
||||
|
||||
placeholders = ",".join("?" * len(domain_list))
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
db.row_factory = aiosqlite.Row
|
||||
async with db.execute(
|
||||
f"SELECT * FROM enriched_domains WHERE domain IN ({placeholders})",
|
||||
@@ -289,7 +292,7 @@ async def get_stats():
|
||||
loop = asyncio.get_event_loop()
|
||||
_total_cache, _tld_cache = await loop.run_in_executor(None, _tld_stats_sync)
|
||||
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
async with db.execute("SELECT COUNT(*) FROM enriched_domains") as cur:
|
||||
enriched = (await cur.fetchone())[0]
|
||||
threshold = int(os.getenv("SCORE_THRESHOLD", "60"))
|
||||
@@ -351,7 +354,7 @@ async def get_enriched(min_score=0, cms=None, country=None, kit_digital=None,
|
||||
conditions.append("site_type = ?")
|
||||
params.append(site_type)
|
||||
where = "WHERE " + " AND ".join(conditions)
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
db.row_factory = aiosqlite.Row
|
||||
async with db.execute(
|
||||
f"SELECT * FROM enriched_domains {where} ORDER BY score DESC LIMIT ? OFFSET ?",
|
||||
@@ -366,7 +369,7 @@ async def get_enriched(min_score=0, cms=None, country=None, kit_digital=None,
|
||||
|
||||
|
||||
async def queue_ai(domains: list[str], language: str = "ES"):
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
await db.executemany(
|
||||
"""INSERT INTO ai_queue (domain, language) VALUES (?, ?)
|
||||
ON CONFLICT(domain) DO UPDATE SET language=excluded.language, status='pending'""",
|
||||
@@ -376,7 +379,7 @@ async def queue_ai(domains: list[str], language: str = "ES"):
|
||||
|
||||
|
||||
async def get_ai_queue_status():
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
async with db.execute("SELECT status, COUNT(*) FROM ai_queue GROUP BY status") as cur:
|
||||
rows = {r[0]: r[1] async for r in cur}
|
||||
return {
|
||||
@@ -390,7 +393,7 @@ async def get_ai_queue_status():
|
||||
|
||||
async def save_ai_assessment(domain: str, assessment: dict, site_analysis: dict = None):
|
||||
import json as _json
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
# Upsert into enriched_domains (domain may not exist yet if assessed before full enrichment)
|
||||
await db.execute(
|
||||
"""INSERT INTO enriched_domains (domain) VALUES (?) ON CONFLICT(domain) DO NOTHING""",
|
||||
@@ -445,7 +448,7 @@ async def save_ai_assessment(domain: str, assessment: dict, site_analysis: dict
|
||||
|
||||
async def save_prescreen_results(results: list[dict]):
|
||||
"""Upsert prescreen HTTP results and/or DeepSeek niche/type classifications."""
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
for r in results:
|
||||
domain = r.get("domain")
|
||||
if not domain:
|
||||
@@ -477,7 +480,7 @@ async def save_prescreen_results(results: list[dict]):
|
||||
|
||||
|
||||
async def queue_domains(domains: list[str]):
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
await db.executemany(
|
||||
"INSERT OR IGNORE INTO job_queue (domain) VALUES (?)",
|
||||
[(d,) for d in domains],
|
||||
@@ -486,7 +489,7 @@ async def queue_domains(domains: list[str]):
|
||||
|
||||
|
||||
async def get_queue_status():
|
||||
async with aiosqlite.connect(SQLITE_PATH) as db:
|
||||
async with aiosqlite.connect(SQLITE_PATH, timeout=30) as db:
|
||||
async with db.execute("SELECT status, COUNT(*) FROM job_queue GROUP BY status") as cur:
|
||||
rows = {r[0]: r[1] async for r in cur}
|
||||
pending = rows.get("pending", 0)
|
||||
|
||||
Reference in New Issue
Block a user