# Kemono-Downloader/src/ui/classes/downloader_factory.py

import re
import requests
from urllib.parse import urlparse

# Utility Imports
from ...utils.network_utils import prepare_cookies_for_request
from ...utils.file_utils import clean_folder_name

# Downloader Thread Imports (Alphabetical Order Recommended)
from .allcomic_downloader_thread import AllcomicDownloadThread
from .booru_downloader_thread import BooruDownloadThread
from .bunkr_downloader_thread import BunkrDownloadThread
from .discord_downloader_thread import DiscordDownloadThread  # Official Discord
from .drive_downloader_thread import DriveDownloadThread
from .erome_downloader_thread import EromeDownloadThread
from .external_link_downloader_thread import ExternalLinkDownloadThread
from .fap_nation_downloader_thread import FapNationDownloadThread
from .hentai2read_downloader_thread import Hentai2readDownloadThread
from .kemono_discord_downloader_thread import KemonoDiscordDownloadThread
from .mangadex_downloader_thread import MangaDexDownloadThread
from .nhentai_downloader_thread import NhentaiDownloadThread
from .pixeldrain_downloader_thread import PixeldrainDownloadThread
from .rule34video_downloader_thread import Rule34VideoDownloadThread
from .saint2_downloader_thread import Saint2DownloadThread
from .simp_city_downloader_thread import SimpCityDownloadThread
from .toonily_downloader_thread import ToonilyDownloadThread
from .deviantart_downloader_thread import DeviantArtDownloadThread
from .hentaifox_downloader_thread import HentaiFoxDownloadThread


def create_downloader_thread(main_app, api_url, service, id1, id2, effective_output_dir_for_run):
    """
    Factory function to create and configure the correct QThread for a given URL.

    Args:
        main_app: The main window instance (supplies UI inputs, pause/cancel events, and the log signal).
        api_url: The URL or API URL being processed.
        service: The service name derived from the URL (e.g. 'danbooru', 'discord', 'nhentai').
        id1, id2: Service-specific identifiers parsed from the URL (for example, server and
            channel IDs for Discord, or a gallery ID for nHentai); some handlers ignore them.
        effective_output_dir_for_run: The resolved output directory for this run.

    Returns a configured QThread instance, a specific error string ("COOKIE_ERROR", "FETCH_ERROR"),
    or None if no special handler is found (indicating fallback to the generic BackendDownloadThread).
    """

    # Handler for Booru sites (Danbooru, Gelbooru)
    if service in ['danbooru', 'gelbooru']:
        api_key = main_app.api_key_input.text().strip()
        user_id = main_app.user_id_input.text().strip()
        return BooruDownloadThread(
            url=api_url, output_dir=effective_output_dir_for_run,
            api_key=api_key, user_id=user_id, parent=main_app
        )

    # Handler for cloud storage sites (Mega, GDrive, Dropbox, GoFile)
    platform = None
    if 'mega.nz' in api_url or 'mega.io' in api_url: platform = 'mega'
    elif 'drive.google.com' in api_url: platform = 'gdrive'
    elif 'dropbox.com' in api_url: platform = 'dropbox'
    elif 'gofile.io' in api_url: platform = 'gofile'
    if platform:
        use_post_subfolder = main_app.use_subfolder_per_post_checkbox.isChecked()
        return DriveDownloadThread(
            api_url, effective_output_dir_for_run, platform, use_post_subfolder,
            main_app.cancellation_event, main_app.pause_event, main_app.log_signal.emit,
            parent=main_app  # Pass parent for consistency
        )

    # Handler for Erome
    if 'erome.com' in api_url:
        return EromeDownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for MangaDex
    if 'mangadex.org' in api_url:
        return MangaDexDownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for Saint2
    is_saint2_url = service == 'saint2' or 'saint2.su' in api_url or 'saint2.pk' in api_url  # Add more domains if needed
    if is_saint2_url and api_url.strip().lower() != 'saint2.su':  # Exclude batch mode trigger if using URL input
        return Saint2DownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for SimpCity
    if service == 'simpcity':
        cookies = prepare_cookies_for_request(
            use_cookie_flag=True,  # SimpCity requires cookies
            cookie_text_input=main_app.simpcity_cookie_text_input.text(),  # Use dedicated input
            selected_cookie_file_path=main_app.selected_cookie_filepath,  # Use shared selection
            app_base_dir=main_app.app_base_dir,
            logger_func=main_app.log_signal.emit,
            target_domain='simpcity.cr'  # Specific domain
        )
        if not cookies:
            main_app.log_signal.emit("❌ SimpCity requires valid cookies. Please provide them.")
            return "COOKIE_ERROR"  # Sentinel value for cookie failure
        return SimpCityDownloadThread(api_url, id2, effective_output_dir_for_run, cookies, main_app)

    # Handler for Rule34Video
    if service == 'rule34video':
        main_app.log_signal.emit(" Rule34Video.com URL detected. Starting dedicated downloader.")
        return Rule34VideoDownloadThread(api_url, effective_output_dir_for_run, main_app)  # id1 (video_id) is used inside the thread

    # Handler for Kemono Discord (place BEFORE the official Discord handler)
    elif service == 'discord' and any(domain in api_url for domain in ['kemono.cr', 'kemono.su', 'kemono.party']):
        main_app.log_signal.emit(" Kemono Discord URL detected. Starting dedicated downloader.")
        cookies = prepare_cookies_for_request(
            use_cookie_flag=main_app.use_cookie_checkbox.isChecked(),  # Respect UI setting
            cookie_text_input=main_app.cookie_text_input.text(),
            selected_cookie_file_path=main_app.selected_cookie_filepath,
            app_base_dir=main_app.app_base_dir,
            logger_func=main_app.log_signal.emit,
            target_domain='kemono.cr'  # Primary Kemono domain, adjust if needed
        )
        # KemonoDiscordDownloadThread expects parent for events
        return KemonoDiscordDownloadThread(
            server_id=id1,
            channel_id=id2,
            output_dir=effective_output_dir_for_run,
            cookies_dict=cookies,
            parent=main_app
        )

    # Handler for official Discord URLs
    elif service == 'discord' and 'discord.com' in api_url:
        main_app.log_signal.emit(" Official Discord URL detected. Starting dedicated downloader.")
        token = main_app.remove_from_filename_input.text().strip()  # Token is in the "Remove Words" field for Discord
        if not token:
            main_app.log_signal.emit("❌ Official Discord requires an Authorization Token in the 'Remove Words' field.")
            return None  # Or a specific error sentinel
        limit_text = main_app.discord_message_limit_input.text().strip()
        message_limit = int(limit_text) if limit_text.isdigit() else None
        mode = main_app.discord_download_scope  # Should be 'pdf' or 'files'
        return DiscordDownloadThread(
            mode=mode,
            session=requests.Session(),  # Create a session for this thread
            token=token,
            output_dir=effective_output_dir_for_run,
            server_id=id1,
            channel_id=id2,
            url=api_url,
            app_base_dir=main_app.app_base_dir,
            limit=message_limit,
            parent=main_app  # Pass main_app for events/signals
        )

    # Handler for Allcomic / Allporncomic
    # Check specific domains or rely on service name if extract_post_info provides it
    if service == 'allcomic' or 'allcomic.com' in api_url or 'allporncomic.com' in api_url:
        return AllcomicDownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for Hentai2Read
    if service == 'hentai2read' or 'hentai2read.com' in api_url:
        return Hentai2readDownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for Fap-Nation
    if service == 'fap-nation' or 'fap-nation.com' in api_url or 'fap-nation.org' in api_url:
        use_post_subfolder = main_app.use_subfolder_per_post_checkbox.isChecked()
        # Ensure signals are passed correctly if needed by the thread
        return FapNationDownloadThread(
            api_url, effective_output_dir_for_run, use_post_subfolder,
            main_app.pause_event, main_app.cancellation_event, main_app.actual_gui_signals, main_app
        )

    # Handler for Pixeldrain
    if service == 'pixeldrain' or 'pixeldrain.com' in api_url:
        return PixeldrainDownloadThread(api_url, effective_output_dir_for_run, main_app)  # URL contains the ID

    # Handler for nHentai
    if service == 'nhentai':
        from ...core.nhentai_client import fetch_nhentai_gallery
        main_app.log_signal.emit(f" nHentai gallery ID {id1} detected. Fetching gallery data...")
        gallery_data = fetch_nhentai_gallery(id1, main_app.log_signal.emit)
        if not gallery_data:
            main_app.log_signal.emit(f"❌ Failed to fetch nHentai gallery data for ID {id1}.")
            return "FETCH_ERROR"  # Sentinel value for fetch failure
        return NhentaiDownloadThread(gallery_data, effective_output_dir_for_run, main_app)

    # Handler for Toonily
    if service == 'toonily' or 'toonily.com' in api_url:
        return ToonilyDownloadThread(api_url, effective_output_dir_for_run, main_app)

    # Handler for Bunkr
    if service == 'bunkr':
        # id1 contains the full URL or album ID from extract_post_info
        return BunkrDownloadThread(id1, effective_output_dir_for_run, main_app)

    # Handler for DeviantArt
    if service == 'deviantart':
        main_app.log_signal.emit(" DeviantArt URL detected. Starting dedicated downloader.")
        return DeviantArtDownloadThread(
            url=api_url,
            output_dir=effective_output_dir_for_run,
            pause_event=main_app.pause_event,
            cancellation_event=main_app.cancellation_event,
            parent=main_app
        )

    # Handler for HentaiFox (New)
    if 'hentaifox.com' in api_url or service == 'hentaifox':
        main_app.log_signal.emit("🦊 HentaiFox URL detected.")
        return HentaiFoxDownloadThread(
            url_or_id=api_url,
            output_dir=effective_output_dir_for_run,
            parent=main_app
        )

    # ----------------------
    # --- Fallback ---
    # If no specific handler matched based on service name or URL pattern, return None.
    # This signals main_window.py to use the generic BackendDownloadThread/PostProcessorWorker,
    # which uses the standard Kemono/Coomer post API.
    main_app.log_signal.emit(f" No specialized downloader found for service '{service}' and URL '{api_url[:50]}...'. Using generic downloader.")
    return None
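

# --- Example (illustrative only) -------------------------------------------
# A minimal sketch, not used by the application, of how a caller such as
# main_window.py might consume this factory's three kinds of return value
# (a configured QThread, a "COOKIE_ERROR"/"FETCH_ERROR" sentinel string, or None).
# The slot name `on_download_finished` is an assumption for the example; the
# real caller's wiring and fallback path may differ.
def _example_dispatch(main_app, api_url, service, id1, id2, output_dir):
    result = create_downloader_thread(main_app, api_url, service, id1, id2, output_dir)
    if result in ("COOKIE_ERROR", "FETCH_ERROR"):
        # Sentinel string: report the failure and stop; no thread was created.
        main_app.log_signal.emit(f"Download aborted: {result}")
        return None
    if result is None:
        # No specialized handler: the caller would fall back to the generic
        # BackendDownloadThread / PostProcessorWorker path (not shown here).
        return None
    # A configured QThread subclass: wire it up and start it.
    result.finished.connect(main_app.on_download_finished)  # assumed slot name
    result.start()
    return result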