Yuvi9587 2025-08-27 07:21:30 -07:00
parent f8b150dfdb
commit cc3565b12b
7 changed files with 1451 additions and 181 deletions

src/core/bunkr_client.py (new file)

@ -0,0 +1,249 @@
import logging
import os
import re
import requests
import html
import time
import datetime
import urllib.parse
import json
import random
import binascii
import itertools
class MockMessage:
Directory = 1
Url = 2
Version = 3
class AlbumException(Exception): pass
class ExtractionError(AlbumException): pass
class HttpError(ExtractionError):
def __init__(self, message="", response=None):
self.response = response
self.status = response.status_code if response is not None else 0
super().__init__(message)
class ControlException(AlbumException): pass
class AbortExtraction(ExtractionError, ControlException): pass
try:
re_compile = re._compiler.compile
except AttributeError:
re_compile = re.sre_compile.compile
HTML_RE = re_compile(r"<[^>]+>")
def extr(txt, begin, end, default=""):
try:
first = txt.index(begin) + len(begin)
return txt[first:txt.index(end, first)]
except Exception: return default
def extract_iter(txt, begin, end, pos=None):
try:
index = txt.index
lbeg = len(begin)
lend = len(end)
while True:
first = index(begin, pos) + lbeg
last = index(end, first)
pos = last + lend
yield txt[first:last]
except Exception: return
def split_html(txt):
try: return [html.unescape(x).strip() for x in HTML_RE.split(txt) if x and not x.isspace()]
except TypeError: return []
def parse_datetime(date_string, format="%Y-%m-%dT%H:%M:%S%z", utcoffset=0):
try:
d = datetime.datetime.strptime(date_string, format)
o = d.utcoffset()
if o is not None: d = d.replace(tzinfo=None, microsecond=0) - o
else:
if d.microsecond: d = d.replace(microsecond=0)
if utcoffset: d += datetime.timedelta(0, utcoffset * -3600)
return d
except (TypeError, IndexError, KeyError, ValueError, OverflowError): return None
unquote = urllib.parse.unquote
unescape = html.unescape
# --- From: util.py ---
def decrypt_xor(encrypted, key, base64=True, fromhex=False):
if base64: encrypted = binascii.a2b_base64(encrypted)
if fromhex: encrypted = bytes.fromhex(encrypted.decode())
div = len(key)
return bytes([encrypted[i] ^ key[i % div] for i in range(len(encrypted))]).decode()
def advance(iterable, num):
iterator = iter(iterable)
next(itertools.islice(iterator, num, num), None)
return iterator
def json_loads(s): return json.loads(s)
def json_dumps(obj): return json.dumps(obj, separators=(",", ":"))
# --- From: common.py ---
class Extractor:
def __init__(self, match, logger):
self.log = logger
self.url = match.string
self.match = match
self.groups = match.groups()
self.session = requests.Session()
self.session.headers["User-Agent"] = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:102.0) Gecko/20100101 Firefox/102.0"
@classmethod
def from_url(cls, url, logger):
if isinstance(cls.pattern, str): cls.pattern = re.compile(cls.pattern)
match = cls.pattern.match(url)
return cls(match, logger) if match else None
def __iter__(self): return self.items()
def items(self): yield MockMessage.Version, 1
def request(self, url, method="GET", fatal=True, **kwargs):
tries = 1
while True:
try:
response = self.session.request(method, url, **kwargs)
if response.status_code < 400: return response
msg = f"'{response.status_code} {response.reason}' for '{response.url}'"
except requests.exceptions.RequestException as exc:
msg = str(exc)
self.log.info("%s (retrying...)", msg)
if tries > 4: break
time.sleep(tries)
tries += 1
if not fatal: return None
raise HttpError(msg)
def request_json(self, url, **kwargs):
response = self.request(url, **kwargs)
try: return json_loads(response.text)
except Exception as exc:
self.log.warning("%s: %s", exc.__class__.__name__, exc)
if not kwargs.get("fatal", True): return {}
raise
# --- From: bunkr.py (Adapted) ---
BASE_PATTERN_BUNKR = r"(?:https?://)?(?:[a-zA-Z0-9-]+\.)?(bunkr\.(?:si|la|ws|red|black|media|site|is|to|ac|cr|ci|fi|pk|ps|sk|ph|su)|bunkrr\.ru)"
DOMAINS = ["bunkr.si", "bunkr.ws", "bunkr.la", "bunkr.red", "bunkr.black", "bunkr.media", "bunkr.site"]
CF_DOMAINS = set()
class BunkrAlbumExtractor(Extractor):
category = "bunkr"
root = "https://bunkr.si"
root_dl = "https://get.bunkrr.su"
root_api = "https://apidl.bunkr.ru"
pattern = re.compile(BASE_PATTERN_BUNKR + r"/a/([^/?#]+)")
def __init__(self, match, logger):
super().__init__(match, logger)
domain_match = re.search(BASE_PATTERN_BUNKR, match.string)
if domain_match:
self.root = "https://" + domain_match.group(1)
self.endpoint = self.root_api + "/api/_001_v2"
self.album_id = self.groups[-1]
def items(self):
page = self.request(self.url).text
title = unescape(unescape(extr(page, 'property="og:title" content="', '"')))
items_html = list(extract_iter(page, '<div class="grid-images_box', "</a>"))
album_data = {
"album_id": self.album_id,
"album_name": title,
"count": len(items_html),
}
yield MockMessage.Directory, album_data, {}
for item_html in items_html:
try:
webpage_url = unescape(extr(item_html, ' href="', '"'))
if webpage_url.startswith("/"):
webpage_url = self.root + webpage_url
file_data = self._extract_file(webpage_url)
info = split_html(item_html)
if not file_data.get("name"):
file_data["name"] = info[-3]
yield MockMessage.Url, file_data, {}
except Exception as exc:
self.log.error("%s: %s", exc.__class__.__name__, exc)
def _extract_file(self, webpage_url):
page = self.request(webpage_url).text
data_id = extr(page, 'data-file-id="', '"')
referer = self.root_dl + "/file/" + data_id
headers = {"Referer": referer, "Origin": self.root_dl}
data = self.request_json(self.endpoint, method="POST", headers=headers, json={"id": data_id})
file_url = decrypt_xor(data["url"], f"SECRET_KEY_{data['timestamp'] // 3600}".encode()) if data.get("encrypted") else data["url"]
file_name = extr(page, "<h1", "<").rpartition(">")[2]
return {
"url": file_url,
"name": unescape(file_name),
"_http_headers": {"Referer": referer}
}
class BunkrMediaExtractor(BunkrAlbumExtractor):
pattern = re.compile(BASE_PATTERN_BUNKR + r"(/[fvid]/[^/?#]+)")
def items(self):
try:
media_path = self.groups[-1]
file_data = self._extract_file(self.root + media_path)
album_data = {"album_name": file_data.get("name", "bunkr_media"), "count": 1}
yield MockMessage.Directory, album_data, {}
yield MockMessage.Url, file_data, {}
except Exception as exc:
self.log.error("%s: %s", exc.__class__.__name__, exc)
yield MockMessage.Directory, {"album_name": "error", "count": 0}, {}
# ==============================================================================
# --- PUBLIC API FOR THE GUI ---
# ==============================================================================
def get_bunkr_extractor(url, logger):
"""Selects the correct Bunkr extractor based on the URL pattern."""
if BunkrAlbumExtractor.pattern.match(url):
logger.info("Bunkr Album URL detected.")
return BunkrAlbumExtractor.from_url(url, logger)
elif BunkrMediaExtractor.pattern.match(url):
logger.info("Bunkr Media URL detected.")
return BunkrMediaExtractor.from_url(url, logger)
else:
logger.error(f"No suitable Bunkr extractor found for URL: {url}")
return None
def fetch_bunkr_data(url, logger):
"""
Main function to be called from the GUI.
It extracts all file information from a Bunkr URL.
Returns:
A tuple of (album_name, list_of_files)
- album_name (str): The name of the album.
- list_of_files (list): A list of dicts, each containing 'url', 'name', and '_http_headers'.
Returns (None, None) on failure.
"""
extractor = get_bunkr_extractor(url, logger)
if not extractor:
return None, None
try:
album_name = "default_bunkr_album"
files_to_download = []
for msg_type, data, metadata in extractor:
if msg_type == MockMessage.Directory:
raw_album_name = data.get('album_name', 'untitled')
album_name = re.sub(r'[<>:"/\\|?*]', '_', raw_album_name).strip() or "untitled"
logger.info(f"Processing Bunkr album: {album_name}")
elif msg_type == MockMessage.Url:
# data here is the file_data dictionary
files_to_download.append(data)
if not files_to_download:
logger.warning("No files found to download from the Bunkr URL.")
return None, None
return album_name, files_to_download
except Exception as e:
logger.error(f"An error occurred while extracting Bunkr info: {e}", exc_info=True)
return None, None
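A minimal usage sketch for the public helper above (not part of the committed file; it assumes the src packages are importable from the project root and uses a placeholder album URL):
import logging

from src.core.bunkr_client import fetch_bunkr_data  # assumed import path

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("bunkr")

# Placeholder URL; fetch_bunkr_data returns (None, None) on failure.
album_name, files = fetch_bunkr_data("https://bunkr.si/a/EXAMPLE", log)
if files:
    for item in files:
        # Each entry carries the resolved file URL, a display name, and the
        # Referer header Bunkr's download hosts expect.
        print(album_name, item["name"], item["url"], item["_http_headers"])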

src/core/erome_client.py (new file)

@ -0,0 +1,147 @@
# src/core/erome_client.py
import os
import re
import html
import time
import urllib.parse
import requests
from datetime import datetime
# #############################################################################
# SECTION: Utility functions adapted from the original script
# #############################################################################
def extr(txt, begin, end, default=""):
"""Stripped-down version of 'extract()' to find text between two delimiters."""
try:
first = txt.index(begin) + len(begin)
return txt[first:txt.index(end, first)]
except (ValueError, IndexError):
return default
def extract_iter(txt, begin, end):
"""Yields all occurrences of text between two delimiters."""
try:
index = txt.index
lbeg = len(begin)
lend = len(end)
pos = 0
while True:
first = index(begin, pos) + lbeg
last = index(end, first)
pos = last + lend
yield txt[first:last]
except (ValueError, IndexError):
return
def nameext_from_url(url):
"""Extracts filename and extension from a URL."""
data = {}
filename = urllib.parse.unquote(url.partition("?")[0].rpartition("/")[2])
name, _, ext = filename.rpartition(".")
if name and len(ext) <= 16:
data["filename"], data["extension"] = name, ext.lower()
else:
data["filename"], data["extension"] = filename, ""
return data
def parse_timestamp(ts, default=None):
"""Creates a datetime object from a Unix timestamp."""
try:
# Use fromtimestamp for simplicity and compatibility
return datetime.fromtimestamp(int(ts))
except (ValueError, TypeError):
return default
# #############################################################################
# SECTION: Main Erome Fetching Logic
# #############################################################################
def fetch_erome_data(url, logger):
"""
Identifies and extracts all media files from an Erome album URL.
Args:
url (str): The Erome album URL (e.g., https://www.erome.com/a/albumID).
logger (function): A function to log progress messages.
Returns:
tuple: A tuple containing (album_folder_name, list_of_file_dicts).
Returns (None, []) if data extraction fails.
"""
album_id_match = re.search(r"/a/(\w+)", url)
if not album_id_match:
logger(f"Error: The URL '{url}' does not appear to be a valid Erome album link.")
return None, []
album_id = album_id_match.group(1)
page_url = f"https://www.erome.com/a/{album_id}"
session = requests.Session()
session.headers.update({
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/108.0.0.0 Safari/537.36",
"Referer": "https://www.erome.com/"
})
try:
logger(f" Fetching Erome album page: {page_url}")
# Add a loop to handle "Please wait" pages
for attempt in range(5):
response = session.get(page_url, timeout=30)
response.raise_for_status()
page_content = response.text
if "<title>Please wait a few moments</title>" in page_content:
logger(f" Cloudflare check detected. Waiting 5 seconds... (Attempt {attempt + 1}/5)")
time.sleep(5)
continue
break
else:
logger(" Error: Could not bypass Cloudflare check after several attempts.")
return None, []
title = html.unescape(extr(page_content, 'property="og:title" content="', '"'))
user = urllib.parse.unquote(extr(page_content, 'href="https://www.erome.com/', '"', default="unknown_user"))
# Sanitize title and user for folder creation
sanitized_title = re.sub(r'[<>:"/\\|?*]', '_', title).strip()
sanitized_user = re.sub(r'[<>:"/\\|?*]', '_', user).strip()
album_folder_name = f"Erome - {sanitized_user} - {sanitized_title} [{album_id}]"
urls = []
# Split the page content into media groups to find all videos and images
media_groups = page_content.split('<div class="media-group"')
for group in media_groups[1:]: # Skip the part before the first media group
# Prioritize <source> tag, fall back to data-src for images
video_url = extr(group, '<source src="', '"') or extr(group, 'data-src="', '"')
if video_url:
urls.append(video_url)
if not urls:
logger(" Warning: No media URLs found on the album page.")
return album_folder_name, []
logger(f" Found {len(urls)} media files in album '{title}'.")
file_list = []
for i, file_url in enumerate(urls, 1):
filename_info = nameext_from_url(file_url)
# Create a clean, descriptive filename
filename = f"{album_id}_{sanitized_title}_{i:03d}.{filename_info.get('extension', 'mp4')}"
file_data = {
"url": file_url,
"filename": filename,
"headers": {"Referer": page_url},
}
file_list.append(file_data)
return album_folder_name, file_list
except requests.exceptions.RequestException as e:
logger(f" Error fetching Erome page: {e}")
return None, []
except Exception as e:
logger(f" An unexpected error occurred during Erome extraction: {e}")
return None, []
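A minimal usage sketch for fetch_erome_data (not part of the committed file; note that this module expects a plain callable for logging rather than a logging.Logger, and the album URL below is a placeholder):
from src.core.erome_client import fetch_erome_data  # assumed import path

# Placeholder URL; the function returns (None, []) on failure.
folder_name, files = fetch_erome_data("https://www.erome.com/a/EXAMPLE", print)
if folder_name and files:
    for item in files:
        # 'headers' carries the Referer required when downloading each file.
        print(folder_name, item["filename"], item["url"], item["headers"])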

src/core/saint2_client.py (new file)

@ -0,0 +1,173 @@
# src/core/saint2_client.py
import os
import re as re_module
import html
import urllib.parse
import requests
# ##############################################################################
# SECTION: Utility functions adapted from the original script
# ##############################################################################
PATTERN_CACHE = {}
def re(pattern):
"""Compile a regular expression pattern and cache it."""
try:
return PATTERN_CACHE[pattern]
except KeyError:
p = PATTERN_CACHE[pattern] = re_module.compile(pattern)
return p
def extract_from(txt, pos=None, default=""):
"""Returns a function that extracts text between two delimiters from 'txt'."""
def extr(begin, end, index=txt.find, txt=txt):
nonlocal pos
try:
start_pos = pos if pos is not None else 0
first = index(begin, start_pos) + len(begin)
last = index(end, first)
if pos is not None:
pos = last + len(end)
return txt[first:last]
except (ValueError, IndexError):
return default
return extr
def nameext_from_url(url):
"""Extract filename and extension from a URL."""
data = {}
filename = urllib.parse.unquote(url.partition("?")[0].rpartition("/")[2])
name, _, ext = filename.rpartition(".")
if name and len(ext) <= 16:
data["filename"], data["extension"] = name, ext.lower()
else:
data["filename"], data["extension"] = filename, ""
return data
# ##############################################################################
# SECTION: Extractor Logic adapted for the main application
# ##############################################################################
class BaseExtractor:
"""A simplified base class for extractors."""
def __init__(self, match, session, logger):
self.match = match
self.groups = match.groups()
self.session = session
self.log = logger
def request(self, url, **kwargs):
"""Makes an HTTP request using the session."""
try:
response = self.session.get(url, **kwargs)
response.raise_for_status()
return response
except requests.exceptions.RequestException as e:
self.log(f"Error making request to {url}: {e}")
return None
class SaintAlbumExtractor(BaseExtractor):
"""Extractor for saint.su albums."""
root = "https://saint2.su"
pattern = re(r"(?:https?://)?saint\d*\.(?:su|pk|cr|to)/a/([^/?#]+)")
def items(self):
"""Collects all files from an album and returns (album_title, file_list)."""
album_id = self.groups[0]
response = self.request(f"{self.root}/a/{album_id}")
if not response:
return None, []
extr = extract_from(response.text)
title = extr("<title>", "<").rpartition(" - ")[0]
self.log(f"Downloading album: {title}")
files_html = re_module.findall(r'<a class="image".*?</a>', response.text, re_module.DOTALL)
file_list = []
for i, file_html in enumerate(files_html, 1):
file_extr = extract_from(file_html)
file_url = html.unescape(file_extr("onclick=\"play('", "'"))
if not file_url:
continue
filename_info = nameext_from_url(file_url)
filename = f"{filename_info['filename']}.{filename_info['extension']}"
file_data = {
"url": file_url,
"filename": filename,
"headers": {"Referer": response.url},
}
file_list.append(file_data)
return title, file_list
class SaintMediaExtractor(BaseExtractor):
"""Extractor for single saint.su media links."""
root = "https://saint2.su"
pattern = re(r"(?:https?://)?saint\d*\.(?:su|pk|cr|to)(/(embe)?d/([^/?#]+))")
def items(self):
"""Collects the single file from a media page and returns (title, file_list)."""
path, embed, media_id = self.groups
url = self.root + path
response = self.request(url)
if not response:
return None, []
extr = extract_from(response.text)
file_url = ""
title = extr("<title>", "<").rpartition(" - ")[0] or media_id
if embed: # /embed/ link
file_url = html.unescape(extr('<source src="', '"'))
else: # /d/ link
file_url = html.unescape(extr('<a href="', '"'))
if not file_url:
self.log("Could not find video URL on the page.")
return title, []
filename_info = nameext_from_url(file_url)
filename = f"{filename_info['filename'] or media_id}.{filename_info['extension'] or 'mp4'}"
file_data = {
"url": file_url,
"filename": filename,
"headers": {"Referer": response.url}
}
return title, [file_data]
def fetch_saint2_data(url, logger):
"""
Identifies the correct extractor for a saint2.su URL and returns the data.
Args:
url (str): The saint2.su URL.
logger (function): A function to log progress messages.
Returns:
tuple: A tuple containing (album_title, list_of_file_dicts).
Returns (None, []) if no data could be fetched.
"""
extractors = [SaintMediaExtractor, SaintAlbumExtractor]
session = requests.Session()
session.headers.update({
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
})
for extractor_cls in extractors:
match = extractor_cls.pattern.match(url)
if match:
extractor = extractor_cls(match, session, logger)
album_title, files = extractor.items()
# Sanitize the album title to be a valid folder name
sanitized_title = re_module.sub(r'[<>:"/\\|?*]', '_', album_title) if album_title else "saint2_download"
return sanitized_title, files
logger(f"Error: The URL '{url}' does not match a known saint2 pattern.")
return None, []
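A minimal usage sketch for fetch_saint2_data (not part of the committed file; the URL is a placeholder and, as above, the logger is a plain callable):
from src.core.saint2_client import fetch_saint2_data  # assumed import path

# Single-media and album URLs are both accepted; placeholder URL shown here.
title, files = fetch_saint2_data("https://saint2.su/a/EXAMPLE", print)
for item in files:
    # Each dict provides the direct URL, a filename, and a Referer header.
    print(title, item["filename"], item["url"], item["headers"])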

src/services/updater.py (new file)

@ -0,0 +1,125 @@
import sys
import os
import requests
import subprocess # Keep this for now, though it's not used in the final command
from packaging.version import parse as parse_version
from PyQt5.QtCore import QThread, pyqtSignal
# Constants for the updater
GITHUB_REPO_URL = "https://api.github.com/repos/Yuvi9587/Kemono-Downloader/releases/latest"
EXE_NAME = "Kemono.Downloader.exe"
class UpdateChecker(QThread):
"""Checks for a new version on GitHub in a background thread."""
update_available = pyqtSignal(str, str) # new_version, download_url
up_to_date = pyqtSignal(str)
update_error = pyqtSignal(str)
def __init__(self, current_version):
super().__init__()
self.current_version_str = current_version.lstrip('v')
def run(self):
try:
response = requests.get(GITHUB_REPO_URL, timeout=15)
response.raise_for_status()
data = response.json()
latest_version_str = data['tag_name'].lstrip('v')
current_version = parse_version(self.current_version_str)
latest_version = parse_version(latest_version_str)
if latest_version > current_version:
for asset in data.get('assets', []):
if asset['name'] == EXE_NAME:
self.update_available.emit(latest_version_str, asset['browser_download_url'])
return
self.update_error.emit(f"Update found, but '{EXE_NAME}' is missing from the release assets.")
else:
self.up_to_date.emit("You are on the latest version.")
except requests.exceptions.RequestException as e:
self.update_error.emit(f"Network error: {e}")
except Exception as e:
self.update_error.emit(f"An error occurred: {e}")
class UpdateDownloader(QThread):
"""
Downloads the new executable and runs an updater script that kills the old process,
replaces the file, and displays a message in the terminal.
"""
download_finished = pyqtSignal()
download_error = pyqtSignal(str)
def __init__(self, download_url, parent_app):
super().__init__()
self.download_url = download_url
self.parent_app = parent_app
def run(self):
try:
app_path = sys.executable
app_dir = os.path.dirname(app_path)
temp_path = os.path.join(app_dir, f"{EXE_NAME}.tmp")
old_path = os.path.join(app_dir, f"{EXE_NAME}.old")
updater_script_path = os.path.join(app_dir, "updater.bat")
# --- NEW: Path for the PID file ---
pid_file_path = os.path.join(app_dir, "updater.pid")
# Download the new executable
with requests.get(self.download_url, stream=True, timeout=300) as r:
r.raise_for_status()
with open(temp_path, 'wb') as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
# --- NEW: Write the current Process ID to the pid file ---
with open(pid_file_path, "w") as f:
f.write(str(os.getpid()))
# --- NEW BATCH SCRIPT ---
# This script now reads the PID from the "updater.pid" file.
script_content = f"""
@echo off
SETLOCAL
echo.
echo Reading process information...
set /p PID=<{pid_file_path}
echo Closing the old application (PID: %PID%)...
taskkill /F /PID %PID%
echo Waiting for files to unlock...
timeout /t 2 /nobreak > nul
echo Replacing application files...
if exist "{old_path}" del /F /Q "{old_path}"
rename "{app_path}" "{os.path.basename(old_path)}"
rename "{temp_path}" "{EXE_NAME}"
echo.
echo ============================================================
echo Update Complete!
echo You can now close this window and run {EXE_NAME}.
echo ============================================================
echo.
pause
echo Cleaning up helper files...
del "{pid_file_path}"
del "%~f0"
ENDLOCAL
"""
with open(updater_script_path, "w") as f:
f.write(script_content)
# --- Go back to the os.startfile command that we know works ---
os.startfile(updater_script_path)
self.download_finished.emit()
except Exception as e:
self.download_error.emit(f"Failed to download or run updater: {e}")
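A minimal wiring sketch for the two threads above (not part of the committed file; the real hookup added by this commit lives in the settings dialog diff further down, and it assumes a QWidget inside a running QApplication with illustrative attribute names):
from src.services.updater import UpdateChecker, UpdateDownloader  # assumed import path

def start_update_check(widget, current_version):
    # Keep references on the widget so the QThreads are not garbage-collected.
    widget.update_checker = UpdateChecker(current_version)
    widget.update_checker.update_available.connect(
        lambda version, url: start_update_download(widget, url))
    widget.update_checker.up_to_date.connect(print)
    widget.update_checker.update_error.connect(print)
    widget.update_checker.start()

def start_update_download(widget, download_url):
    widget.update_downloader = UpdateDownloader(download_url, widget)
    # updater.bat takes over once the download finishes, so the app just exits.
    widget.update_downloader.download_finished.connect(widget.close)
    widget.update_downloader.download_error.connect(print)
    widget.update_downloader.start()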

(modified file: FutureSettingsDialog)

@ -1,6 +1,7 @@
# --- Standard Library Imports ---
import os
import json
import sys
# --- PyQt5 Imports ---
from PyQt5.QtCore import Qt, QStandardPaths
@ -17,9 +18,9 @@ from ...config.constants import (
THEME_KEY, LANGUAGE_KEY, DOWNLOAD_LOCATION_KEY,
RESOLUTION_KEY, UI_SCALE_KEY, SAVE_CREATOR_JSON_KEY,
COOKIE_TEXT_KEY, USE_COOKIE_KEY,
-FETCH_FIRST_KEY ### ADDED ###
FETCH_FIRST_KEY
)
from ...services.updater import UpdateChecker, UpdateDownloader
class FutureSettingsDialog(QDialog):
"""
@ -30,6 +31,7 @@ class FutureSettingsDialog(QDialog):
super().__init__(parent)
self.parent_app = parent_app_ref
self.setModal(True)
self.update_downloader_thread = None # To keep a reference
app_icon = get_app_icon_object()
if app_icon and not app_icon.isNull():
@ -37,7 +39,7 @@ class FutureSettingsDialog(QDialog):
screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 800
scale_factor = screen_height / 800.0
-base_min_w, base_min_h = 420, 390
base_min_w, base_min_h = 420, 480 # Increased height for update section
scaled_min_w = int(base_min_w * scale_factor)
scaled_min_h = int(base_min_h * scale_factor)
self.setMinimumSize(scaled_min_w, scaled_min_h)
@ -53,21 +55,19 @@
self.interface_group_box = QGroupBox()
interface_layout = QGridLayout(self.interface_group_box)
-# Theme
# Theme, UI Scale, Language (unchanged)...
self.theme_label = QLabel()
self.theme_toggle_button = QPushButton()
self.theme_toggle_button.clicked.connect(self._toggle_theme)
interface_layout.addWidget(self.theme_label, 0, 0)
interface_layout.addWidget(self.theme_toggle_button, 0, 1)
-# UI Scale
self.ui_scale_label = QLabel()
self.ui_scale_combo_box = QComboBox()
self.ui_scale_combo_box.currentIndexChanged.connect(self._display_setting_changed)
interface_layout.addWidget(self.ui_scale_label, 1, 0)
interface_layout.addWidget(self.ui_scale_combo_box, 1, 1)
-# Language
self.language_label = QLabel()
self.language_combo_box = QComboBox()
self.language_combo_box.currentIndexChanged.connect(self._language_selection_changed)
@ -78,6 +78,7 @@
self.download_window_group_box = QGroupBox()
download_window_layout = QGridLayout(self.download_window_group_box)
self.window_size_label = QLabel()
self.resolution_combo_box = QComboBox()
self.resolution_combo_box.currentIndexChanged.connect(self._display_setting_changed)
@ -100,14 +101,96 @@
main_layout.addWidget(self.download_window_group_box)
# --- NEW: Update Section ---
self.update_group_box = QGroupBox()
update_layout = QGridLayout(self.update_group_box)
self.version_label = QLabel()
self.update_status_label = QLabel()
self.check_update_button = QPushButton()
self.check_update_button.clicked.connect(self._check_for_updates)
update_layout.addWidget(self.version_label, 0, 0)
update_layout.addWidget(self.update_status_label, 0, 1)
update_layout.addWidget(self.check_update_button, 1, 0, 1, 2)
main_layout.addWidget(self.update_group_box)
# --- END: New Section ---
main_layout.addStretch(1)
self.ok_button = QPushButton()
self.ok_button.clicked.connect(self.accept)
main_layout.addWidget(self.ok_button, 0, Qt.AlignRight | Qt.AlignBottom)
def _retranslate_ui(self):
self.setWindowTitle(self._tr("settings_dialog_title", "Settings"))
self.interface_group_box.setTitle(self._tr("interface_group_title", "Interface Settings"))
self.download_window_group_box.setTitle(self._tr("download_window_group_title", "Download & Window Settings"))
self.theme_label.setText(self._tr("theme_label", "Theme:"))
self.ui_scale_label.setText(self._tr("ui_scale_label", "UI Scale:"))
self.language_label.setText(self._tr("language_label", "Language:"))
self.window_size_label.setText(self._tr("window_size_label", "Window Size:"))
self.default_path_label.setText(self._tr("default_path_label", "Default Path:"))
self.save_creator_json_checkbox.setText(self._tr("save_creator_json_label", "Save Creator.json file"))
self.fetch_first_checkbox.setText(self._tr("fetch_first_label", "Fetch First (Download after all pages are found)"))
self.fetch_first_checkbox.setToolTip(self._tr("fetch_first_tooltip", "If checked, the downloader will find all posts from a creator first before starting any downloads.\nThis can be slower to start but provides a more accurate progress bar."))
self._update_theme_toggle_button_text()
self.save_path_button.setText(self._tr("settings_save_cookie_path_button", "Save Cookie + Download Path"))
self.save_path_button.setToolTip(self._tr("settings_save_cookie_path_tooltip", "Save the current 'Download Location' and Cookie settings for future sessions."))
self.ok_button.setText(self._tr("ok_button", "OK"))
# --- NEW: Translations for Update Section ---
self.update_group_box.setTitle(self._tr("update_group_title", "Application Updates"))
current_version = self.parent_app.windowTitle().split(' v')[-1]
self.version_label.setText(self._tr("current_version_label", f"Current Version: v{current_version}"))
self.update_status_label.setText(self._tr("update_status_ready", "Ready to check."))
self.check_update_button.setText(self._tr("check_for_updates_button", "Check for Updates"))
# --- END: New Translations ---
self._populate_display_combo_boxes()
self._populate_language_combo_box()
self._load_checkbox_states()
def _check_for_updates(self):
"""Starts the update check thread."""
self.check_update_button.setEnabled(False)
self.update_status_label.setText(self._tr("update_status_checking", "Checking..."))
current_version = self.parent_app.windowTitle().split(' v')[-1]
self.update_checker_thread = UpdateChecker(current_version)
self.update_checker_thread.update_available.connect(self._on_update_available)
self.update_checker_thread.up_to_date.connect(self._on_up_to_date)
self.update_checker_thread.update_error.connect(self._on_update_error)
self.update_checker_thread.start()
def _on_update_available(self, new_version, download_url):
self.update_status_label.setText(self._tr("update_status_found", f"Update found: v{new_version}"))
self.check_update_button.setEnabled(True)
reply = QMessageBox.question(self, self._tr("update_available_title", "Update Available"),
self._tr("update_available_message", f"A new version (v{new_version}) is available.\nWould you like to download and install it now?"),
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
if reply == QMessageBox.Yes:
self.ok_button.setEnabled(False)
self.check_update_button.setEnabled(False)
self.update_status_label.setText(self._tr("update_status_downloading", "Downloading update..."))
self.update_downloader_thread = UpdateDownloader(download_url, self.parent_app)
self.update_downloader_thread.download_finished.connect(self._on_download_finished)
self.update_downloader_thread.download_error.connect(self._on_update_error)
self.update_downloader_thread.start()
def _on_download_finished(self):
QApplication.instance().quit()
def _on_up_to_date(self, message):
self.update_status_label.setText(self._tr("update_status_latest", message))
self.check_update_button.setEnabled(True)
def _on_update_error(self, message):
self.update_status_label.setText(self._tr("update_status_error", f"Error: {message}"))
self.check_update_button.setEnabled(True)
self.ok_button.setEnabled(True)
# --- (The rest of the file remains unchanged from your provided code) ---
def _load_checkbox_states(self):
-"""Loads the initial state for all checkboxes from settings."""
self.save_creator_json_checkbox.blockSignals(True)
should_save = self.parent_app.settings.value(SAVE_CREATOR_JSON_KEY, True, type=bool)
self.save_creator_json_checkbox.setChecked(should_save)
@ -119,13 +202,11 @@
self.fetch_first_checkbox.blockSignals(False)
def _creator_json_setting_changed(self, state):
-"""Saves the state of the 'Save Creator.json' checkbox."""
is_checked = state == Qt.Checked
self.parent_app.settings.setValue(SAVE_CREATOR_JSON_KEY, is_checked)
self.parent_app.settings.sync()
def _fetch_first_setting_changed(self, state):
-"""Saves the state of the 'Fetch First' checkbox."""
is_checked = state == Qt.Checked
self.parent_app.settings.setValue(FETCH_FIRST_KEY, is_checked)
self.parent_app.settings.sync()
@ -135,34 +216,6 @@
return get_translation(self.parent_app.current_selected_language, key, default_text)
return default_text
-def _retranslate_ui(self):
-self.setWindowTitle(self._tr("settings_dialog_title", "Settings"))
-self.interface_group_box.setTitle(self._tr("interface_group_title", "Interface Settings"))
-self.download_window_group_box.setTitle(self._tr("download_window_group_title", "Download & Window Settings"))
-self.theme_label.setText(self._tr("theme_label", "Theme:"))
-self.ui_scale_label.setText(self._tr("ui_scale_label", "UI Scale:"))
-self.language_label.setText(self._tr("language_label", "Language:"))
-self.window_size_label.setText(self._tr("window_size_label", "Window Size:"))
-self.default_path_label.setText(self._tr("default_path_label", "Default Path:"))
-self.save_creator_json_checkbox.setText(self._tr("save_creator_json_label", "Save Creator.json file"))
-self.fetch_first_checkbox.setText(self._tr("fetch_first_label", "Fetch First (Download after all pages are found)"))
-self.fetch_first_checkbox.setToolTip(self._tr("fetch_first_tooltip", "If checked, the downloader will find all posts from a creator first before starting any downloads.\nThis can be slower to start but provides a more accurate progress bar."))
-self._update_theme_toggle_button_text()
-self.save_path_button.setText(self._tr("settings_save_cookie_path_button", "Save Cookie + Download Path"))
-self.save_path_button.setToolTip(self._tr("settings_save_cookie_path_tooltip", "Save the current 'Download Location' and Cookie settings for future sessions."))
-self.ok_button.setText(self._tr("ok_button", "OK"))
-self._populate_display_combo_boxes()
-self._populate_language_combo_box()
-self._load_checkbox_states()
-# --- (The rest of the file remains unchanged) ---
def _apply_theme(self):
if self.parent_app and self.parent_app.current_theme == "dark":
scale = getattr(self.parent_app, 'scale_factor', 1)
@ -188,14 +241,7 @@
def _populate_display_combo_boxes(self):
self.resolution_combo_box.blockSignals(True)
self.resolution_combo_box.clear()
-resolutions = [
-("Auto", self._tr("auto_resolution", "Auto (System Default)")),
-("1280x720", "1280 x 720"),
-("1600x900", "1600 x 900"),
-("1920x1080", "1920 x 1080 (Full HD)"),
-("2560x1440", "2560 x 1440 (2K)"),
-("3840x2160", "3840 x 2160 (4K)")
-]
resolutions = [("Auto", "Auto"), ("1280x720", "1280x720"), ("1600x900", "1600x900"), ("1920x1080", "1920x1080")]
current_res = self.parent_app.settings.value(RESOLUTION_KEY, "Auto")
for res_key, res_name in resolutions:
self.resolution_combo_box.addItem(res_name, res_key)
@ -215,34 +261,21 @@
(1.75, "175%"),
(2.0, "200%")
]
-current_scale = self.parent_app.settings.value(UI_SCALE_KEY, 1.0)
current_scale = float(self.parent_app.settings.value(UI_SCALE_KEY, 1.0))
for scale_val, scale_name in scales:
self.ui_scale_combo_box.addItem(scale_name, scale_val)
-if abs(current_scale - scale_val) < 0.01:
if abs(float(current_scale) - scale_val) < 0.01:
self.ui_scale_combo_box.setCurrentIndex(self.ui_scale_combo_box.count() - 1)
self.ui_scale_combo_box.blockSignals(False)
def _display_setting_changed(self):
selected_res = self.resolution_combo_box.currentData()
selected_scale = self.ui_scale_combo_box.currentData()
self.parent_app.settings.setValue(RESOLUTION_KEY, selected_res)
self.parent_app.settings.setValue(UI_SCALE_KEY, selected_scale)
self.parent_app.settings.sync()
QMessageBox.information(self, self._tr("display_change_title", "Display Settings Changed"),
self._tr("language_change_message", "A restart is required..."))
-msg_box = QMessageBox(self)
-msg_box.setIcon(QMessageBox.Information)
-msg_box.setWindowTitle(self._tr("display_change_title", "Display Settings Changed"))
-msg_box.setText(self._tr("language_change_message", "A restart is required for these changes to take effect."))
-msg_box.setInformativeText(self._tr("language_change_informative", "Would you like to restart now?"))
-restart_button = msg_box.addButton(self._tr("restart_now_button", "Restart Now"), QMessageBox.ApplyRole)
-ok_button = msg_box.addButton(self._tr("ok_button", "OK"), QMessageBox.AcceptRole)
-msg_box.setDefaultButton(ok_button)
-msg_box.exec_()
-if msg_box.clickedButton() == restart_button:
-self.parent_app._request_restart_application()
def _populate_language_combo_box(self):
self.language_combo_box.blockSignals(True)
@ -266,39 +299,23 @@
self.parent_app.settings.setValue(LANGUAGE_KEY, selected_lang_code)
self.parent_app.settings.sync()
self.parent_app.current_selected_language = selected_lang_code
self._retranslate_ui()
if hasattr(self.parent_app, '_retranslate_main_ui'):
self.parent_app._retranslate_main_ui()
QMessageBox.information(self, self._tr("language_change_title", "Language Changed"),
self._tr("language_change_message", "A restart is required..."))
-msg_box = QMessageBox(self)
-msg_box.setIcon(QMessageBox.Information)
-msg_box.setWindowTitle(self._tr("language_change_title", "Language Changed"))
-msg_box.setText(self._tr("language_change_message", "A restart is required..."))
-msg_box.setInformativeText(self._tr("language_change_informative", "Would you like to restart now?"))
-restart_button = msg_box.addButton(self._tr("restart_now_button", "Restart Now"), QMessageBox.ApplyRole)
-ok_button = msg_box.addButton(self._tr("ok_button", "OK"), QMessageBox.AcceptRole)
-msg_box.setDefaultButton(ok_button)
-msg_box.exec_()
-if msg_box.clickedButton() == restart_button:
-self.parent_app._request_restart_application()
def _save_cookie_and_path(self):
-"""Saves the current download path and/or cookie settings from the main window."""
path_saved = False
cookie_saved = False
if hasattr(self.parent_app, 'dir_input') and self.parent_app.dir_input:
current_path = self.parent_app.dir_input.text().strip()
if current_path and os.path.isdir(current_path):
self.parent_app.settings.setValue(DOWNLOAD_LOCATION_KEY, current_path)
path_saved = True
if hasattr(self.parent_app, 'use_cookie_checkbox'):
use_cookie = self.parent_app.use_cookie_checkbox.isChecked()
cookie_content = self.parent_app.cookie_text_input.text().strip()
if use_cookie and cookie_content:
self.parent_app.settings.setValue(USE_COOKIE_KEY, True)
self.parent_app.settings.setValue(COOKIE_TEXT_KEY, cookie_content)
@ -306,19 +323,8 @@
else:
self.parent_app.settings.setValue(USE_COOKIE_KEY, False)
self.parent_app.settings.setValue(COOKIE_TEXT_KEY, "")
self.parent_app.settings.sync()
if path_saved or cookie_saved:
-# --- User Feedback ---
-if path_saved and cookie_saved:
-message = self._tr("settings_save_both_success", "Download location and cookie settings saved.")
-elif path_saved:
-message = self._tr("settings_save_path_only_success", "Download location saved. No cookie settings were active to save.")
-elif cookie_saved:
-message = self._tr("settings_save_cookie_only_success", "Cookie settings saved. Download location was not set.")
QMessageBox.information(self, "Settings Saved", "Settings have been saved.")
else:
-QMessageBox.warning(self, self._tr("settings_save_nothing_title", "Nothing to Save"),
-self._tr("settings_save_nothing_message", "The download location is not a valid directory and no cookie was active."))
-return
-QMessageBox.information(self, self._tr("settings_save_success_title", "Settings Saved"), message)
QMessageBox.warning(self, "Nothing to Save", "No valid settings to save.")

(modified file: DownloaderApp)

@ -38,6 +38,9 @@ from ..core.api_client import download_from_api
from ..core.discord_client import fetch_server_channels, fetch_channel_messages
from ..core.manager import DownloadManager
from ..core.nhentai_client import fetch_nhentai_gallery
from ..core.bunkr_client import fetch_bunkr_data
from ..core.saint2_client import fetch_saint2_data
from ..core.erome_client import fetch_erome_data
from .assets import get_app_icon_object
from ..config.constants import *
from ..utils.file_utils import KNOWN_NAMES, clean_folder_name
@ -283,19 +286,15 @@ class DownloaderApp (QWidget ):
self.download_location_label_widget = None
self.remove_from_filename_label_widget = None
self.skip_words_label_widget = None
-self.setWindowTitle("Kemono Downloader v6.5.0")
self.setWindowTitle("Kemono Downloader v7.0.0")
setup_ui(self)
self._connect_signals()
-self.log_signal.emit(" Local API server functionality has been removed.")
-self.log_signal.emit(" 'Skip Current File' button has been removed.")
if hasattr(self, 'character_input'):
self.character_input.setToolTip(self._tr("character_input_tooltip", "Enter character names (comma-separated)..."))
self.log_signal.emit(f" Manga filename style loaded: '{self.manga_filename_style}'")
self.log_signal.emit(f" Skip words scope loaded: '{self.skip_words_scope}'")
self.log_signal.emit(f" Character filter scope set to default: '{self.char_filter_scope}'")
self.log_signal.emit(f" Multi-part download defaults to: {'Enabled' if self.allow_multipart_download_setting else 'Disabled'}")
-self.log_signal.emit(f" Cookie text defaults to: Empty on launch")
-self.log_signal.emit(f" 'Use Cookie' setting defaults to: Disabled on launch")
self.log_signal.emit(f" Scan post content for images defaults to: {'Enabled' if self.scan_content_images_setting else 'Disabled'}")
self.log_signal.emit(f" Application language loaded: '{self.current_selected_language.upper()}' (UI may not reflect this yet).")
self._retranslate_main_ui()
@ -304,6 +303,18 @@ class DownloaderApp (QWidget ):
self._load_saved_cookie_settings()
self._update_button_states_and_connections()
self._check_for_interrupted_session()
self._cleanup_after_update()
def _cleanup_after_update(self):
"""Deletes the old executable after a successful update."""
try:
app_path = sys.executable
old_app_path = os.path.join(os.path.dirname(app_path), "Kemono.Downloader.exe.old")
if os.path.exists(old_app_path):
os.remove(old_app_path)
self.log_signal.emit(" Cleaned up old application file after update.")
except Exception as e:
self.log_signal.emit(f"⚠️ Could not remove old application file: {e}")
def _apply_theme_and_restart_prompt(self):
"""Applies the theme and prompts the user to restart."""
@ -925,7 +936,7 @@ class DownloaderApp (QWidget ):
if hasattr (self ,'use_cookie_checkbox'):
self .use_cookie_checkbox .toggled .connect (self ._update_cookie_input_visibility )
if hasattr (self ,'link_input'):
-self .link_input .textChanged .connect (self ._sync_queue_with_link_input )
self.link_input.textChanged.connect(self._update_ui_for_url_change)
self.link_input.textChanged.connect(self._update_contextual_ui_elements)
self.link_input.textChanged.connect(self._update_button_states_and_connections)
if hasattr(self, 'discord_scope_toggle_button'):
@ -3125,15 +3136,75 @@ class DownloaderApp (QWidget ):
if total_posts >0 or processed_posts >0 :
self .file_progress_label .setText ("")
def _set_ui_for_specialized_downloader(self, is_specialized):
"""Disables or enables UI elements for non-standard downloaders."""
widgets_to_disable = [
self.page_range_label, self.start_page_input, self.to_label, self.end_page_input,
self.character_filter_widget, self.skip_words_input, self.skip_scope_toggle_button,
self.remove_from_filename_input, self.radio_images, self.radio_videos,
self.radio_only_archives, self.radio_only_links, self.radio_only_audio, self.radio_more,
self.skip_zip_checkbox, self.download_thumbnails_checkbox, self.compress_images_checkbox,
self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
self.scan_content_images_checkbox, self.external_links_checkbox, self.manga_mode_checkbox,
self.use_cookie_checkbox, self.keep_duplicates_checkbox, self.date_prefix_checkbox,
self.manga_rename_toggle_button, self.manga_date_prefix_input,
self.multipart_toggle_button, self.custom_folder_input, self.custom_folder_label,
self.discord_scope_toggle_button, self.save_discord_as_pdf_btn
]
enable_state = not is_specialized
for widget in widgets_to_disable:
if widget:
widget.setEnabled(enable_state)
# When disabling, force 'All' to be checked and disable it too
if is_specialized and self.radio_all:
self.radio_all.setChecked(True)
self.radio_all.setEnabled(False)
elif self.radio_all:
self.radio_all.setEnabled(True)
# Re-run standard UI logic when re-enabling to restore correct states
if enable_state:
self._update_all_ui_states()
def _update_all_ui_states(self):
"""A single function to call all UI update methods to restore state."""
is_manga_checked = self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False
is_subfolder_checked = self.use_subfolders_checkbox.isChecked() if self.use_subfolders_checkbox else False
is_cookie_checked = self.use_cookie_checkbox.isChecked() if self.use_cookie_checkbox else False
self.update_ui_for_manga_mode(is_manga_checked)
self.update_custom_folder_visibility()
self.update_page_range_enabled_state()
if self.radio_group.checkedButton():
self._handle_filter_mode_change(self.radio_group.checkedButton(), True)
self.update_ui_for_subfolders(is_subfolder_checked)
self._update_cookie_input_visibility(is_cookie_checked)
def _update_contextual_ui_elements(self, text=""): def _update_contextual_ui_elements(self, text=""):
"""Shows or hides UI elements based on the URL, like the Discord scope button.""" """Shows or hides UI elements based on the URL, like the Discord scope button."""
if not hasattr(self, 'discord_scope_toggle_button'): return if not hasattr(self, 'discord_scope_toggle_button'): return
url_text = self.link_input.text().strip() url_text = self.link_input.text().strip()
service, _, _ = extract_post_info(url_text) service, _, _ = extract_post_info(url_text)
# Handle specialized downloaders (Bunkr, nhentai)
is_saint2 = 'saint2.su' in url_text or 'saint2.pk' in url_text
is_erome = 'erome.com' in url_text
is_specialized = service in ['bunkr', 'nhentai'] or is_saint2 or is_erome
self._set_ui_for_specialized_downloader(is_specialized)
# Handle Discord UI
is_discord = (service == 'discord')
self.discord_scope_toggle_button.setVisible(is_discord)
-if is_discord: self._update_discord_scope_button_text()
-else: self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
self.save_discord_as_pdf_btn.setVisible(is_discord)
if is_discord:
self._update_discord_scope_button_text()
elif not is_specialized: # Don't change button text for specialized downloaders
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
def _update_discord_scope_button_text(self):
"""Updates the text of the discord scope button and the main download button."""
@ -3218,7 +3289,66 @@ class DownloaderApp (QWidget ):
api_url = direct_api_url if direct_api_url else self.link_input.text().strip()
-# --- NEW: NHENTAI BATCH DOWNLOAD LOGIC ---
# --- START: MOVED AND CORRECTED LOGIC ---
# This block is moved to run before any special URL checks.
main_ui_download_dir = self.dir_input.text().strip()
extract_links_only = (self.radio_only_links and self.radio_only_links.isChecked())
effective_output_dir_for_run = ""
if override_output_dir:
if not main_ui_download_dir:
QMessageBox.critical(self, "Configuration Error",
"The main 'Download Location' must be set in the UI "
"before downloading favorites with 'Artist Folders' scope.")
if self.is_processing_favorites_queue:
self.log_signal.emit(f"❌ Favorite download for '{api_url}' skipped: Main download directory not set.")
return False
if not os.path.isdir(main_ui_download_dir):
QMessageBox.critical(self, "Directory Error",
f"The main 'Download Location' ('{main_ui_download_dir}') "
"does not exist or is not a directory. Please set a valid one for 'Artist Folders' scope.")
if self.is_processing_favorites_queue:
self.log_signal.emit(f"❌ Favorite download for '{api_url}' skipped: Main download directory invalid.")
return False
effective_output_dir_for_run = os.path.normpath(override_output_dir)
else:
is_special_downloader = 'saint2.su' in api_url or 'saint2.pk' in api_url or 'nhentai.net' in api_url or 'bunkr' in api_url or 'erome.com' in api_url
if not extract_links_only and not main_ui_download_dir:
QMessageBox.critical(self, "Input Error", "Download Directory is required.")
return False
if not extract_links_only and main_ui_download_dir and not os.path.isdir(main_ui_download_dir):
reply = QMessageBox.question(self, "Create Directory?",
f"The directory '{main_ui_download_dir}' does not exist.\nCreate it now?",
QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
if reply == QMessageBox.Yes:
try:
os.makedirs(main_ui_download_dir, exist_ok=True)
self.log_signal.emit(f" Created directory: {main_ui_download_dir}")
except Exception as e:
QMessageBox.critical(self, "Directory Error", f"Could not create directory: {e}")
return False
else:
self.log_signal.emit("❌ Download cancelled: Output directory does not exist and was not created.")
return False
effective_output_dir_for_run = os.path.normpath(main_ui_download_dir) if main_ui_download_dir else ""
if 'erome.com' in api_url:
self.log_signal.emit(" Erome.com URL detected. Starting dedicated Erome download.")
self.set_ui_enabled(False)
self.download_thread = EromeDownloadThread(api_url, effective_output_dir_for_run, self)
self.download_thread.progress_signal.connect(self.handle_main_log)
self.download_thread.file_progress_signal.connect(self.update_file_progress_display)
self.download_thread.finished_signal.connect(
lambda dl, skip, cancelled: self.download_finished(dl, skip, cancelled, [])
)
self.download_thread.start()
self._update_button_states_and_connections()
return True
if 'nhentai.net' in api_url and not re.search(r'/g/(\d+)', api_url):
self.log_signal.emit("=" * 40)
self.log_signal.emit("🚀 nhentai batch download mode detected.")
@ -3235,7 +3365,6 @@ class DownloaderApp (QWidget ):
try:
with open(nhentai_txt_path, 'r', encoding='utf-8') as f:
for line in f:
-# Find all URLs in the line (handles comma separation or just spaces)
found_urls = re.findall(r'https?://nhentai\.net/g/\d+/?', line)
if found_urls:
urls_to_download.extend(found_urls)
@ -3261,49 +3390,65 @@ class DownloaderApp (QWidget ):
if not self.is_processing_favorites_queue: if not self.is_processing_favorites_queue:
self._process_next_favorite_download() self._process_next_favorite_download()
return True return True
        is_saint2_url = 'saint2.su' in api_url or 'saint2.pk' in api_url
        if is_saint2_url:
            # First, check if it's the batch command. If so, do nothing here and let the next block handle it.
            if api_url.strip().lower() != 'saint2.su':
                self.log_signal.emit("  Saint2.su URL detected. Starting dedicated Saint2 download.")
                self.set_ui_enabled(False)
                self.download_thread = Saint2DownloadThread(api_url, effective_output_dir_for_run, self)
                self.download_thread.progress_signal.connect(self.handle_main_log)
                self.download_thread.file_progress_signal.connect(self.update_file_progress_display)
                self.download_thread.finished_signal.connect(
                    lambda dl, skip, cancelled: self.download_finished(dl, skip, cancelled, [])
                )
                self.download_thread.start()
                self._update_button_states_and_connections()
                return True

        if api_url.strip().lower() == 'saint2.su':
            self.log_signal.emit("=" * 40)
            self.log_signal.emit("🚀 Saint2.su batch download mode detected.")

            saint2_txt_path = os.path.join(self.app_base_dir, "appdata", "saint2.su.txt")
            self.log_signal.emit(f"  Looking for batch file at: {saint2_txt_path}")

            if not os.path.exists(saint2_txt_path):
                QMessageBox.warning(self, "File Not Found", f"To use batch mode, create a file named 'saint2.su.txt' in your 'appdata' folder.\n\nPlace one saint2.su URL on each line.")
                self.log_signal.emit(f"'saint2.su.txt' not found. Aborting batch download.")
                return False

            urls_to_download = []
            try:
                with open(saint2_txt_path, 'r', encoding='utf-8') as f:
                    for line in f:
                        # Find valid saint2 URLs in the line
                        found_urls = re.findall(r'https?://saint\d*\.(?:su|pk|cr|to)/(?:a|d|embed)/[^/?#\s]+', line)
                        if found_urls:
                            urls_to_download.extend(found_urls)
            except Exception as e:
                QMessageBox.critical(self, "File Error", f"Could not read 'saint2.su.txt':\n{e}")
                self.log_signal.emit(f"  ❌ Error reading 'saint2.su.txt': {e}")
                return False

            if not urls_to_download:
                QMessageBox.information(self, "Empty File", "No valid saint2.su URLs were found in 'saint2.su.txt'.")
                self.log_signal.emit("  'saint2.su.txt' was found but contained no valid URLs.")
                return False

            self.log_signal.emit(f"  Found {len(urls_to_download)} URLs to process.")
            self.favorite_download_queue.clear()
            for url in urls_to_download:
                self.favorite_download_queue.append({
                    'url': url,
                    'name': f"saint2.su link from batch",
                    'type': 'post'  # Treat each URL as a single post-like item
                })
            if not self.is_processing_favorites_queue:
                self._process_next_favorite_download()
            return True
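            # Illustrative note, not part of this commit: given an appdata/saint2.su.txt containing lines such as
            #     https://saint2.su/a/AbCdEf
            #     https://saint2.pk/embed/XyZ123  https://saint2.su/d/Qwerty
            # the regex above is expected to collect each album/direct/embed link, and every match is queued as
            #     {'url': 'https://saint2.su/a/AbCdEf', 'name': 'saint2.su link from batch', 'type': 'post'}
            # before _process_next_favorite_download() drains the queue one item at a time.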
        if not is_restore:
            self._create_initial_session_file(api_url, effective_output_dir_for_run, remaining_queue=self.favorite_download_queue)
@ -3328,26 +3473,24 @@ class DownloaderApp (QWidget ):
        self.cancellation_message_logged_this_session = False
        service, id1, id2 = extract_post_info(api_url)

        if service == 'nhentai':
            gallery_id = id1
            self.log_signal.emit("=" * 40)
            self.log_signal.emit(f"🚀 Detected nhentai gallery ID: {gallery_id}")
            if not effective_output_dir_for_run or not os.path.isdir(effective_output_dir_for_run):
                QMessageBox.critical(self, "Input Error", "A valid Download Location is required.")
                return False
            gallery_data = fetch_nhentai_gallery(gallery_id, self.log_signal.emit)
            if not gallery_data:
                QMessageBox.critical(self, "Error", f"Could not retrieve gallery data for ID {gallery_id}.")
                return False
            self.set_ui_enabled(False)
            self.download_thread = NhentaiDownloadThread(gallery_data, effective_output_dir_for_run, self)
            self.download_thread.progress_signal.connect(self.handle_main_log)
            self.download_thread.finished_signal.connect(
                lambda dl, skip, cancelled: self.download_finished(dl, skip, cancelled, [])
@ -3355,9 +3498,20 @@ class DownloaderApp (QWidget ):
            self.download_thread.start()
            self._update_button_states_and_connections()
            return True

        if service == 'bunkr':
            self.log_signal.emit("  Bunkr URL detected. Starting dedicated Bunkr download.")
            self.set_ui_enabled(False)
            self.download_thread = BunkrDownloadThread(id1, effective_output_dir_for_run, self)
            self.download_thread.progress_signal.connect(self.handle_main_log)
            self.download_thread.file_progress_signal.connect(self.update_file_progress_display)
            self.download_thread.finished_signal.connect(
                lambda dl, skip, cancelled: self.download_finished(dl, skip, cancelled, [])
            )
            self.download_thread.start()
            self._update_button_states_and_connections()
            return True

        if not service or not id1:
            QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
@ -4137,6 +4291,79 @@ class DownloaderApp (QWidget ):
            self.is_restore_pending = True
            self.start_download(direct_api_url=restore_url, override_output_dir=restore_dir, is_restore=True)
def _update_ui_for_url_change(self, text=""):
"""A single, authoritative function to update all UI states based on the URL."""
url_text = self.link_input.text().strip()
service, _, _ = extract_post_info(url_text)
# A list of all widgets that are context-dependent
widgets_to_manage = [
self.page_range_label, self.start_page_input, self.to_label, self.end_page_input,
self.character_filter_widget, self.skip_words_input, self.skip_scope_toggle_button,
self.remove_from_filename_input, self.radio_all, self.radio_images, self.radio_videos,
self.radio_only_archives, self.radio_only_links, self.radio_only_audio, self.radio_more,
self.skip_zip_checkbox, self.download_thumbnails_checkbox, self.compress_images_checkbox,
self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
self.scan_content_images_checkbox, self.external_links_checkbox, self.manga_mode_checkbox,
self.use_cookie_checkbox, self.keep_duplicates_checkbox, self.date_prefix_checkbox,
self.manga_rename_toggle_button, self.manga_date_prefix_input,
self.multipart_toggle_button, self.custom_folder_input, self.custom_folder_label
]
# --- Logic for Specialized Downloaders (Bunkr, nhentai) ---
if service in ['bunkr', 'nhentai']:
self.progress_log_label.setText("📜 Progress Log:")
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
# Disable all complex settings
for widget in widgets_to_manage:
if widget:
widget.setEnabled(False)
# Force 'All' filter and disable it
if self.radio_all:
self.radio_all.setChecked(True)
# Ensure Discord UI is hidden
if hasattr(self, 'discord_scope_toggle_button'):
self.discord_scope_toggle_button.setVisible(False)
if hasattr(self, 'save_discord_as_pdf_btn'):
self.save_discord_as_pdf_btn.setVisible(False)
return # CRUCIAL: Stop here for specialized URLs
# --- Logic for Standard Downloaders (Kemono, Coomer, Discord) ---
# First, re-enable all managed widgets as a baseline
for widget in widgets_to_manage:
if widget:
widget.setEnabled(True)
# Now, apply context-specific rules for the standard downloaders
is_discord = (service == 'discord')
if hasattr(self, 'discord_scope_toggle_button'):
self.discord_scope_toggle_button.setVisible(is_discord)
if hasattr(self, 'save_discord_as_pdf_btn'):
self.save_discord_as_pdf_btn.setVisible(is_discord)
if is_discord:
self._update_discord_scope_button_text()
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
# Re-run all the standard UI state functions to apply the correct logic
is_manga_checked = self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False
is_subfolder_checked = self.use_subfolders_checkbox.isChecked() if self.use_subfolders_checkbox else False
is_cookie_checked = self.use_cookie_checkbox.isChecked() if self.use_cookie_checkbox else False
self.update_ui_for_manga_mode(is_manga_checked)
self.update_custom_folder_visibility()
self.update_page_range_enabled_state()
if self.radio_group and self.radio_group.checkedButton():
self._handle_filter_mode_change(self.radio_group.checkedButton(), True)
self.update_ui_for_subfolders(is_subfolder_checked)
self._update_cookie_input_visibility(is_cookie_checked)
    def start_single_threaded_download (self ,**kwargs ):
        global BackendDownloadThread
        try :
@ -4825,6 +5052,18 @@ class DownloaderApp (QWidget ):
            self.log_signal.emit("  Signaling nhentai download thread to cancel.")
            self.download_thread.cancel()
if isinstance(self.download_thread, BunkrDownloadThread):
self.log_signal.emit(" Signaling Bunkr download thread to cancel.")
self.download_thread.cancel()
if isinstance(self.download_thread, Saint2DownloadThread):
self.log_signal.emit(" Signaling Saint2 download thread to cancel.")
self.download_thread.cancel()
if isinstance(self.download_thread, EromeDownloadThread):
self.log_signal.emit(" Signaling Erome download thread to cancel.")
self.download_thread.cancel()
    def _get_domain_for_service(self, service_name: str) -> str:
        """Determines the base domain for a given service."""
        if not isinstance(service_name, str):
@ -5939,6 +6178,325 @@ class DownloaderApp (QWidget ):
        # Use a QTimer to avoid deep recursion and correctly move to the next item.
        QTimer.singleShot(100, self._process_next_favorite_download)
class Saint2DownloadThread(QThread):
"""A dedicated QThread for handling saint2.su downloads."""
progress_signal = pyqtSignal(str)
file_progress_signal = pyqtSignal(str, object)
finished_signal = pyqtSignal(int, int, bool) # dl_count, skip_count, cancelled
def __init__(self, url, output_dir, parent=None):
super().__init__(parent)
self.saint2_url = url
self.output_dir = output_dir
self.is_cancelled = False
def run(self):
download_count = 0
skip_count = 0
self.progress_signal.emit("=" * 40)
self.progress_signal.emit(f"🚀 Starting Saint2.su Download for: {self.saint2_url}")
# Use the new client to get the download info
album_name, files_to_download = fetch_saint2_data(self.saint2_url, self.progress_signal.emit)
if not files_to_download:
self.progress_signal.emit("❌ Failed to extract file information from Saint2. Aborting.")
self.finished_signal.emit(0, 0, self.is_cancelled)
return
# For single media, album_name is the title; for albums, it's the album title
album_path = os.path.join(self.output_dir, album_name)
try:
os.makedirs(album_path, exist_ok=True)
self.progress_signal.emit(f" Saving to folder: '{album_path}'")
except OSError as e:
self.progress_signal.emit(f"❌ Critical error creating directory: {e}")
self.finished_signal.emit(0, len(files_to_download), self.is_cancelled)
return
total_files = len(files_to_download)
session = requests.Session()
for i, file_data in enumerate(files_to_download):
if self.is_cancelled:
self.progress_signal.emit(" Download cancelled by user.")
skip_count = total_files - download_count
break
filename = file_data.get('filename', f'untitled_{i+1}.mp4')
file_url = file_data.get('url')
headers = file_data.get('headers')
filepath = os.path.join(album_path, filename)
if os.path.exists(filepath):
self.progress_signal.emit(f" -> Skip ({i+1}/{total_files}): '{filename}' already exists.")
skip_count += 1
continue
self.progress_signal.emit(f" Downloading ({i+1}/{total_files}): '{filename}'...")
try:
response = session.get(file_url, stream=True, headers=headers, timeout=60)
response.raise_for_status()
total_size = int(response.headers.get('content-length', 0))
downloaded_size = 0
last_update_time = time.time()
with open(filepath, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
if self.is_cancelled:
break
if chunk:
f.write(chunk)
downloaded_size += len(chunk)
current_time = time.time()
if total_size > 0 and (current_time - last_update_time) > 0.5:
self.file_progress_signal.emit(filename, (downloaded_size, total_size))
last_update_time = current_time
if self.is_cancelled:
if os.path.exists(filepath): os.remove(filepath)
continue
if total_size > 0:
self.file_progress_signal.emit(filename, (total_size, total_size))
download_count += 1
except requests.exceptions.RequestException as e:
self.progress_signal.emit(f" ❌ Failed to download '{filename}'. Error: {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
except Exception as e:
self.progress_signal.emit(f" ❌ An unexpected error occurred with '{filename}': {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
self.file_progress_signal.emit("", None)
self.finished_signal.emit(download_count, skip_count, self.is_cancelled)
def cancel(self):
self.is_cancelled = True
self.progress_signal.emit(" Cancellation signal received by Saint2 thread.")
class EromeDownloadThread(QThread):
"""A dedicated QThread for handling erome.com downloads."""
progress_signal = pyqtSignal(str)
file_progress_signal = pyqtSignal(str, object)
finished_signal = pyqtSignal(int, int, bool) # dl_count, skip_count, cancelled
def __init__(self, url, output_dir, parent=None):
super().__init__(parent)
self.erome_url = url
self.output_dir = output_dir
self.is_cancelled = False
def run(self):
download_count = 0
skip_count = 0
self.progress_signal.emit("=" * 40)
self.progress_signal.emit(f"🚀 Starting Erome.com Download for: {self.erome_url}")
album_name, files_to_download = fetch_erome_data(self.erome_url, self.progress_signal.emit)
if not files_to_download:
self.progress_signal.emit("❌ Failed to extract file information from Erome. Aborting.")
self.finished_signal.emit(0, 0, self.is_cancelled)
return
album_path = os.path.join(self.output_dir, album_name)
try:
os.makedirs(album_path, exist_ok=True)
self.progress_signal.emit(f" Saving to folder: '{album_path}'")
except OSError as e:
self.progress_signal.emit(f"❌ Critical error creating directory: {e}")
self.finished_signal.emit(0, len(files_to_download), self.is_cancelled)
return
total_files = len(files_to_download)
session = requests.Session()
for i, file_data in enumerate(files_to_download):
if self.is_cancelled:
self.progress_signal.emit(" Download cancelled by user.")
skip_count = total_files - download_count
break
filename = file_data.get('filename', f'untitled_{i+1}.mp4')
file_url = file_data.get('url')
headers = file_data.get('headers')
filepath = os.path.join(album_path, filename)
if os.path.exists(filepath):
self.progress_signal.emit(f" -> Skip ({i+1}/{total_files}): '{filename}' already exists.")
skip_count += 1
continue
self.progress_signal.emit(f" Downloading ({i+1}/{total_files}): '{filename}'...")
try:
response = session.get(file_url, stream=True, headers=headers, timeout=60)
response.raise_for_status()
total_size = int(response.headers.get('content-length', 0))
downloaded_size = 0
last_update_time = time.time()
with open(filepath, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
if self.is_cancelled:
break
if chunk:
f.write(chunk)
downloaded_size += len(chunk)
current_time = time.time()
if total_size > 0 and (current_time - last_update_time) > 0.5:
self.file_progress_signal.emit(filename, (downloaded_size, total_size))
last_update_time = current_time
if self.is_cancelled:
if os.path.exists(filepath): os.remove(filepath)
continue
if total_size > 0:
self.file_progress_signal.emit(filename, (total_size, total_size))
download_count += 1
except requests.exceptions.RequestException as e:
self.progress_signal.emit(f" ❌ Failed to download '{filename}'. Error: {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
except Exception as e:
self.progress_signal.emit(f" ❌ An unexpected error occurred with '{filename}': {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
self.file_progress_signal.emit("", None)
self.finished_signal.emit(download_count, skip_count, self.is_cancelled)
def cancel(self):
self.is_cancelled = True
self.progress_signal.emit(" Cancellation signal received by Erome thread.")
class BunkrDownloadThread(QThread):
"""A dedicated QThread for handling Bunkr downloads."""
progress_signal = pyqtSignal(str)
    # Detailed per-file progress: emits (filename, (downloaded_bytes, total_bytes))
file_progress_signal = pyqtSignal(str, object)
finished_signal = pyqtSignal(int, int, bool, list)
def __init__(self, url, output_dir, parent=None):
super().__init__(parent)
self.bunkr_url = url
self.output_dir = output_dir
self.is_cancelled = False
class ThreadLogger:
def __init__(self, signal_emitter):
self.signal_emitter = signal_emitter
def info(self, msg, *args, **kwargs):
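                # Note: extra *args are ignored here, so callers that use %-style logging
                # (e.g. log.info("%s", url)) will emit the raw format string unformatted.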
self.signal_emitter.emit(str(msg))
def error(self, msg, *args, **kwargs):
self.signal_emitter.emit(f"❌ ERROR: {msg}")
def warning(self, msg, *args, **kwargs):
self.signal_emitter.emit(f"⚠️ WARNING: {msg}")
def debug(self, msg, *args, **kwargs):
pass
self.logger = ThreadLogger(self.progress_signal)
def run(self):
download_count = 0
skip_count = 0
self.progress_signal.emit("=" * 40)
self.progress_signal.emit(f"🚀 Starting Bunkr Download for: {self.bunkr_url}")
album_name, files_to_download = fetch_bunkr_data(self.bunkr_url, self.logger)
if not files_to_download:
self.progress_signal.emit("❌ Failed to extract file information from Bunkr. Aborting.")
self.finished_signal.emit(0, 0, self.is_cancelled, [])
return
album_path = os.path.join(self.output_dir, album_name)
try:
os.makedirs(album_path, exist_ok=True)
self.progress_signal.emit(f" Saving to folder: '{album_path}'")
except OSError as e:
self.progress_signal.emit(f"❌ Critical error creating directory: {e}")
self.finished_signal.emit(0, len(files_to_download), self.is_cancelled, [])
return
total_files = len(files_to_download)
for i, file_data in enumerate(files_to_download):
if self.is_cancelled:
self.progress_signal.emit(" Download cancelled by user.")
skip_count = total_files - download_count
break
filename = file_data.get('name', 'untitled_file')
file_url = file_data.get('url')
headers = file_data.get('_http_headers')
filename = re.sub(r'[<>:"/\\|?*]', '_', filename).strip()
filepath = os.path.join(album_path, filename)
if os.path.exists(filepath):
self.progress_signal.emit(f" -> Skip ({i+1}/{total_files}): '{filename}' already exists.")
skip_count += 1
continue
self.progress_signal.emit(f" Downloading ({i+1}/{total_files}): '{filename}'...")
try:
response = requests.get(file_url, stream=True, headers=headers, timeout=60)
response.raise_for_status()
                # Stream the file in chunks and emit throttled progress updates.
total_size = int(response.headers.get('content-length', 0))
downloaded_size = 0
last_update_time = time.time()
with open(filepath, 'wb') as f:
for chunk in response.iter_content(chunk_size=8192):
if self.is_cancelled:
break
if chunk:
f.write(chunk)
downloaded_size += len(chunk)
current_time = time.time()
if total_size > 0 and (current_time - last_update_time) > 0.5:
self.file_progress_signal.emit(filename, (downloaded_size, total_size))
last_update_time = current_time
if self.is_cancelled:
if os.path.exists(filepath): os.remove(filepath)
continue
# Emit final progress to show 100%
if total_size > 0:
self.file_progress_signal.emit(filename, (total_size, total_size))
download_count += 1
except requests.exceptions.RequestException as e:
self.progress_signal.emit(f" ❌ Failed to download '{filename}'. Error: {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
except Exception as e:
self.progress_signal.emit(f" ❌ An unexpected error occurred with '{filename}': {e}")
if os.path.exists(filepath): os.remove(filepath)
skip_count += 1
# Clear the progress label when finished
self.file_progress_signal.emit("", None)
self.finished_signal.emit(download_count, skip_count, self.is_cancelled, [])
def cancel(self):
self.is_cancelled = True
self.progress_signal.emit(" Cancellation signal received by Bunkr thread.")
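# Illustrative sketch only, not part of this commit: Saint2DownloadThread, EromeDownloadThread and
# BunkrDownloadThread are all wired up by the UI in the same way, roughly equivalent to a hypothetical
# helper like the one below (BunkrDownloadThread's finished_signal carries an extra list argument,
# which the trailing *rest absorbs):
#
#     def _start_dedicated_thread(self, thread_cls, *thread_args):
#         self.set_ui_enabled(False)
#         self.download_thread = thread_cls(*thread_args, self)
#         self.download_thread.progress_signal.connect(self.handle_main_log)
#         self.download_thread.file_progress_signal.connect(self.update_file_progress_display)
#         self.download_thread.finished_signal.connect(
#             lambda dl, skip, cancelled, *rest: self.download_finished(dl, skip, cancelled, [])
#         )
#         self.download_thread.start()
#         self._update_button_states_and_connections()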
class ExternalLinkDownloadThread (QThread ):
    """A QThread to handle downloading multiple external links sequentially."""
    progress_signal =pyqtSignal (str )

View File

@ -138,10 +138,12 @@ def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_coo
        return None
# In src/utils/network_utils.py
def extract_post_info(url_string):
    """
    Parses a URL string to extract the service, user ID, and post ID.
    UPDATED to support Discord, Bunkr, and nhentai URLs.

    Args:
        url_string (str): The URL to parse.
@ -150,30 +152,40 @@ def extract_post_info(url_string):
        tuple: A tuple containing (service, id1, id2).
               For posts: (service, user_id, post_id).
               For Discord: ('discord', server_id, channel_id).
               For Bunkr: ('bunkr', full_url, None).
               For nhentai: ('nhentai', gallery_id, None).
    """
    if not isinstance(url_string, str) or not url_string.strip():
        return None, None, None
stripped_url = url_string.strip()
# --- Bunkr Check ---
bunkr_pattern = re.compile(
r"(?:https?://)?(?:[a-zA-Z0-9-]+\.)?bunkr\.(?:si|la|ws|red|black|media|site|is|to|ac|cr|ci|fi|pk|ps|sk|ph|su|ru)|bunkrr\.ru"
)
if bunkr_pattern.search(stripped_url):
return 'bunkr', stripped_url, None
# --- nhentai Check ---
nhentai_match = re.search(r'nhentai\.net/g/(\d+)', stripped_url)
if nhentai_match:
return 'nhentai', nhentai_match.group(1), None
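    # Illustrative examples (hypothetical inputs) of the tuples returned by the checks above and below:
    #     extract_post_info("https://bunkr.si/a/abc123")     -> ('bunkr', 'https://bunkr.si/a/abc123', None)
    #     extract_post_info("https://nhentai.net/g/123456")  -> ('nhentai', '123456', None)
    #     extract_post_info("https://kemono.su/patreon/user/12345/post/67890") -> ('patreon', '12345', '67890')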
# --- Kemono/Coomer/Discord Parsing ---
    try:
        parsed_url = urlparse(stripped_url)
        path_parts = [part for part in parsed_url.path.strip('/').split('/') if part]

        if len(path_parts) >= 3 and path_parts[0].lower() == 'discord' and path_parts[1].lower() == 'server':
            return 'discord', path_parts[2], path_parts[3] if len(path_parts) >= 4 else None

        # Standard creator/post format: /<service>/user/<user_id>/post/<post_id>
        if len(path_parts) >= 3 and path_parts[1].lower() == 'user':
            service = path_parts[0]
            user_id = path_parts[2]
            post_id = path_parts[4] if len(path_parts) >= 5 and path_parts[3].lower() == 'post' else None
            return service, user_id, post_id

        # API format: /api/v1/<service>/user/<user_id>...
        if len(path_parts) >= 5 and path_parts[0:2] == ['api', 'v1'] and path_parts[3].lower() == 'user':
            service = path_parts[2]
            user_id = path_parts[4]