mirror of
https://github.com/Yuvi9587/Kemono-Downloader.git
synced 2025-12-17 15:36:51 +00:00
Commit
This commit is contained in:
parent
69ddc2ca08
commit
9082c0c94a
205
drive.py
205
drive.py
@ -1,205 +0,0 @@
|
||||
from mega import Mega
|
||||
import os
|
||||
import requests
|
||||
import traceback
|
||||
from urllib .parse import urlparse ,urlunparse ,parse_qs ,urlencode
|
||||
|
||||
try :
|
||||
import gdown
|
||||
GDOWN_AVAILABLE =True
|
||||
except ImportError :
|
||||
GDOWN_AVAILABLE =False
|
||||
|
||||
def download_mega_file(mega_link, download_path=".", logger_func=print):
    """
    Download a single file from a public Mega.nz link.

    Args:
        mega_link (str): The public Mega.nz link to the file.
        download_path (str, optional): Directory the file is saved into.
            Defaults to the current directory.
        logger_func (callable, optional): Function to use for logging.
            Defaults to print.

    Raises:
        Exception: Re-raises any client, filesystem or network error after
            logging it; also raised when Mega reports no downloaded file.
    """
    logger_func("drive.py: download_mega_file called.")
    logger_func(f"drive.py: mega_link='{mega_link}', download_path='{download_path}'")

    # Stage 1: construct the Mega client.
    logger_func("drive.py: Initializing Mega client (Mega())...")
    try:
        client = Mega()
    except Exception as init_err:
        logger_func(f"drive.py: ERROR during Mega() instantiation: {init_err}")
        traceback.print_exc()
        raise

    # Stage 2: anonymous login — public links need no credentials.
    logger_func("drive.py: Mega client initialized. Logging in anonymously (m.login())...")
    try:
        session = client.login()
    except Exception as login_err:
        logger_func(f"drive.py: ERROR during m.login(): {login_err}")
        traceback.print_exc()
        raise
    logger_func("drive.py: Logged in anonymously.")

    logger_func(f"drive.py: Attempting to download from: {mega_link}")

    try:
        # Ensure the destination directory exists before downloading into it.
        if not os.path.exists(download_path):
            logger_func(f"drive.py: Download path '{download_path}' does not exist. Creating it...")
            os.makedirs(download_path, exist_ok=True)
        logger_func(f"drive.py: Download path ensured: '{download_path}'")

        logger_func(f"drive.py: Calling m.download_url for '{mega_link}' to '{download_path}'...")

        # download_url() returns the local path of the downloaded file.
        # With dest_filename=None it uses the name from get_public_url_info().
        saved_path = session.download_url(mega_link, dest_path=download_path, dest_filename=None)

        logger_func(f"drive.py: m.download_url returned: {saved_path}")

        if saved_path and os.path.exists(saved_path):
            logger_func(f"drive.py: File downloaded successfully! Saved as: {saved_path}")
            # Size verification is deliberately skipped: get_public_url_info()
            # would be another network call and is redundant once the
            # download itself succeeded.
        elif saved_path:
            logger_func(f"drive.py: m.download_url returned a path '{saved_path}', but it does not exist on disk. Download may have failed silently or path is incorrect.")
            raise Exception(f"Mega download_url returned path '{saved_path}' which was not found.")
        else:
            logger_func("drive.py: Download failed. m.download_url did not return a valid file path.")
            raise Exception("Mega download_url did not return a file path or failed.")

    except PermissionError as e:
        logger_func(f"drive.py: PermissionError: {e}. Denied to write to '{download_path}'. Please check permissions.")
        raise
    except FileNotFoundError as e:
        logger_func(f"drive.py: FileNotFoundError: {e}. The path '{download_path}' is invalid.")
        raise
    except requests.exceptions.ConnectionError as e:  # More specific for network
        logger_func(f"drive.py: requests.exceptions.ConnectionError: {e}. Network problem during Mega operation.")
        raise
    except requests.exceptions.RequestException as e:  # General requests error
        logger_func(f"drive.py: requests.exceptions.RequestException: {e} during request to Mega.")
        raise
    except Exception as e:  # Catch-all for other errors from mega.py or os calls
        logger_func(f"drive.py: An unexpected error occurred during Mega download: {e}")
        traceback.print_exc()  # Full traceback for unexpected errors
        raise
|
||||
|
||||
def download_gdrive_file(gdrive_link, download_path=".", logger_func=print):
    """
    Downloads a file from a public Google Drive link.

    Args:
        gdrive_link (str): The public Google Drive link to the file.
        download_path (str, optional): The directory to save the downloaded file.
                                       Defaults to the current directory.
        logger_func (callable, optional): Function to use for logging. Defaults to print.

    Raises:
        ImportError: If the optional gdown dependency is not installed.
        Exception: If gdown returns no output path, or the downloaded file
            cannot be located on disk afterwards.
    """
    # gdown is an optional dependency; fail early with install guidance.
    if not GDOWN_AVAILABLE:
        logger_func("❌ Error: gdown library is not installed. Cannot download from Google Drive.")
        logger_func("Please install it: pip install gdown")
        raise ImportError("gdown library not found. Please install it: pip install gdown")

    logger_func(f"Attempting to download from Google Drive: {gdrive_link}")
    try:
        if not os.path.exists(download_path):
            logger_func(f"Download path '{download_path}' does not exist. Creating it...")
            os.makedirs(download_path, exist_ok=True)

        logger_func(f"Starting Google Drive download to '{download_path}'...")

        # fuzzy=True lets gdown accept full share URLs, not only file IDs.
        # NOTE(review): gdown may treat `output` as a directory only when it
        # ends with a path separator — confirm for the pinned gdown version.
        output_file_path = gdown.download(gdrive_link, output=download_path, quiet=False, fuzzy=True)

        # gdown's return value may be an absolute path or a bare filename,
        # so the existence checks below try both interpretations.
        if output_file_path and os.path.exists(os.path.join(download_path, os.path.basename(output_file_path))):
            logger_func(f"✅ Google Drive file downloaded successfully: {output_file_path}")
        elif output_file_path:
            # Second interpretation: treat the returned value as relative
            # to the destination directory.
            full_path_check = os.path.join(download_path, output_file_path)
            if os.path.exists(full_path_check):
                logger_func(f"✅ Google Drive file downloaded successfully: {full_path_check}")
            else:
                logger_func(f"⚠️ Google Drive download finished, gdown returned '{output_file_path}', but file not found at expected location.")
                logger_func(f" Please check '{download_path}' for the downloaded file, it might have a different name than expected by gdown's return.")

                # Heuristic fallback: if exactly one file sits in the
                # destination directory, assume it is the file just downloaded.
                files_in_dest = [f for f in os.listdir(download_path) if os.path.isfile(os.path.join(download_path, f))]
                if len(files_in_dest) == 1:
                    logger_func(f" Found one file in destination: {os.path.join(download_path, files_in_dest[0])}. Assuming this is it.")
                elif len(files_in_dest) > 1 and output_file_path in files_in_dest:
                    logger_func(f" Confirmed file '{output_file_path}' exists in '{download_path}'.")
                else:
                    # All heuristics exhausted — report failure to the caller.
                    raise Exception(f"gdown download failed or file not found. Returned: {output_file_path}")
        else:
            logger_func("❌ Google Drive download failed. gdown did not return an output path.")
            raise Exception("gdown download failed.")

    except PermissionError:
        logger_func(f"❌ Error: Permission denied to write to '{download_path}'. Please check permissions.")
        raise
    except Exception as e:
        logger_func(f"❌ An error occurred during Google Drive download: {e}")
        traceback.print_exc()
        raise
|
||||
|
||||
def _get_filename_from_headers (headers ):
|
||||
cd =headers .get ('content-disposition')
|
||||
if not cd :
|
||||
return None
|
||||
fname_match =re .findall ('filename="?([^"]+)"?',cd )
|
||||
if fname_match :
|
||||
return fname_match [0 ].strip ()
|
||||
return None
|
||||
|
||||
def download_dropbox_file(dropbox_link, download_path=".", logger_func=print):
    """
    Downloads a file from a public Dropbox link.

    Args:
        dropbox_link (str): The public Dropbox link to the file.
        download_path (str, optional): The directory to save the downloaded file.
                                       Defaults to the current directory.
        logger_func (callable, optional): Function to use for logging. Defaults to print.

    Raises:
        Exception: Re-raises any HTTP, filesystem or network error after
            logging it (requests.HTTPError via raise_for_status included).
    """
    # Local import: drive.py's top-level imports do not include `re`, so the
    # filename-sanitizing re.sub below raised NameError in the original code.
    import re

    logger_func(f"Attempting to download from Dropbox: {dropbox_link}")

    # Rewrite the share link into a direct-download link by forcing dl=1
    # (dl=0 serves Dropbox's HTML preview page instead of the file bytes).
    parsed_url = urlparse(dropbox_link)
    query_params = parse_qs(parsed_url.query)
    query_params['dl'] = ['1']
    new_query = urlencode(query_params, doseq=True)
    direct_download_url = urlunparse(parsed_url._replace(query=new_query))

    logger_func(f" Using direct download URL: {direct_download_url}")

    try:
        if not os.path.exists(download_path):
            logger_func(f"Download path '{download_path}' does not exist. Creating it...")
            os.makedirs(download_path, exist_ok=True)

        # Stream the body so large files are never held fully in memory;
        # timeout is (connect=10s, read=300s).
        with requests.get(direct_download_url, stream=True, allow_redirects=True, timeout=(10, 300)) as r:
            r.raise_for_status()
            # Filename preference: server header, then URL path, then a fallback.
            filename = _get_filename_from_headers(r.headers) or os.path.basename(urlparse(dropbox_link).path) or "dropbox_downloaded_file"

            # Strip characters that are invalid in Windows filenames.
            filename = re.sub(r'[<>:"/\\|?*]', '_', filename)
            full_save_path = os.path.join(download_path, filename)
            logger_func(f"Starting Dropbox download of '{filename}' to '{full_save_path}'...")
            with open(full_save_path, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
            logger_func(f"✅ Dropbox file downloaded successfully: {full_save_path}")
    except Exception as e:
        logger_func(f"❌ An error occurred during Dropbox download: {e}")
        traceback.print_exc()
        raise
|
||||
|
||||
if __name__ == "__main__":
    # Manual smoke test: download one known public Mega file into a
    # "mega_downloads" folder next to this script.
    mega_file_link = "https://mega.nz/file/03oRjBQT#Tcbp5sQVIyPbdmv8sLgbb9Lf9AZvZLdKRSQiuXkNW0k"

    if not mega_file_link.startswith("https://mega.nz/file/"):
        print("Invalid Mega file link format. It should start with 'https://mega.nz/file/'.")
    else:
        base_dir = os.path.dirname(os.path.abspath(__file__))
        target_dir = os.path.join(base_dir, "mega_downloads")

        print(f"Files will be downloaded to: {target_dir}")
        download_mega_file(mega_file_link, target_dir, logger_func=print)
|
||||
1
src/__init__.py
Normal file
1
src/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
1
src/config/__init__.py
Normal file
1
src/config/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
110
src/config/constants.py
Normal file
110
src/config/constants.py
Normal file
@ -0,0 +1,110 @@
|
||||
"""Project-wide constants for the Kemono Downloader.

Grouped by concern: application metadata, naming styles, download scopes
and statuses, threading limits, multipart settings, QSettings keys, UI
identifiers, dialog return codes, file-type extension sets, and
text-processing word lists.
"""

# --- Application Metadata ---
# Organization/app names used to namespace persisted settings.
CONFIG_ORGANIZATION_NAME = "KemonoDownloader"
CONFIG_APP_NAME_MAIN = "ApplicationSettings"
CONFIG_APP_NAME_TOUR = "ApplicationTour"

# --- Filename and Folder Naming Styles ---
STYLE_POST_TITLE = "post_title"
STYLE_ORIGINAL_NAME = "original_name"
STYLE_DATE_BASED = "date_based"
STYLE_DATE_POST_TITLE = "date_post_title"
STYLE_POST_TITLE_GLOBAL_NUMBERING = "post_title_global_numbering"
MANGA_DATE_PREFIX_DEFAULT = ""

# --- Download Scopes ---
# What "skip words" apply to: file names, whole posts, or both.
SKIP_SCOPE_FILES = "files"
SKIP_SCOPE_POSTS = "posts"
SKIP_SCOPE_BOTH = "both"

# Where character-filter matching is applied.
CHAR_SCOPE_TITLE = "title"
CHAR_SCOPE_FILES = "files"
CHAR_SCOPE_BOTH = "both"
CHAR_SCOPE_COMMENTS = "comments"

FAVORITE_SCOPE_SELECTED_LOCATION = "selected_location"
FAVORITE_SCOPE_ARTIST_FOLDERS = "artist_folders"

# --- Download Status Constants ---
FILE_DOWNLOAD_STATUS_SUCCESS = "success"
FILE_DOWNLOAD_STATUS_SKIPPED = "skipped"
FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER = "failed_retry_later"
FILE_DOWNLOAD_STATUS_FAILED_PERMANENTLY_THIS_SESSION = "failed_permanent_session"

# --- Threading and Performance ---
MAX_THREADS = 200
RECOMMENDED_MAX_THREADS = 50
SOFT_WARNING_THREAD_THRESHOLD = 40
MAX_FILE_THREADS_PER_POST_OR_WORKER = 10
POST_WORKER_BATCH_THRESHOLD = 30
POST_WORKER_NUM_BATCHES = 4
POST_WORKER_BATCH_DELAY_SECONDS = 2.5
MAX_POST_WORKERS_WHEN_COMMENT_FILTERING = 3

# --- Multipart Download Settings ---
MIN_SIZE_FOR_MULTIPART_DOWNLOAD = 10 * 1024 * 1024  # 10 MB
MAX_PARTS_FOR_MULTIPART_DOWNLOAD = 15

# --- UI and Settings Keys (for QSettings) ---
# NOTE(review): the V1/V2/V19 suffixes appear to version the stored values —
# bumping a suffix would leave old stored settings unread. Confirm intent.
TOUR_SHOWN_KEY = "neverShowTourAgainV19"
MANGA_FILENAME_STYLE_KEY = "mangaFilenameStyleV1"
SKIP_WORDS_SCOPE_KEY = "skipWordsScopeV1"
ALLOW_MULTIPART_DOWNLOAD_KEY = "allowMultipartDownloadV1"
USE_COOKIE_KEY = "useCookieV1"
COOKIE_TEXT_KEY = "cookieTextV1"
CHAR_FILTER_SCOPE_KEY = "charFilterScopeV1"
THEME_KEY = "currentThemeV2"
SCAN_CONTENT_IMAGES_KEY = "scanContentForImagesV1"
LANGUAGE_KEY = "currentLanguageV1"
DOWNLOAD_LOCATION_KEY = "downloadLocationV1"

# --- UI Constants and Identifiers ---
HTML_PREFIX = "<!HTML!>"
LOG_DISPLAY_LINKS = "links"
LOG_DISPLAY_DOWNLOAD_PROGRESS = "download_progress"

# --- Dialog Return Codes ---
CONFIRM_ADD_ALL_ACCEPTED = 1
CONFIRM_ADD_ALL_SKIP_ADDING = 2
CONFIRM_ADD_ALL_CANCEL_DOWNLOAD = 3

# --- File Type Extensions ---
# Lower-case, dot-prefixed extension sets used to classify files.
IMAGE_EXTENSIONS = {
    '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.tiff', '.tif', '.webp',
    '.heic', '.heif', '.svg', '.ico', '.jfif', '.pjpeg', '.pjp', '.avif'
}
VIDEO_EXTENSIONS = {
    '.mp4', '.mov', '.mkv', '.webm', '.avi', '.wmv', '.flv', '.mpeg',
    '.mpg', '.m4v', '.3gp', '.ogv', '.ts', '.vob'
}
ARCHIVE_EXTENSIONS = {
    '.zip', '.rar', '.7z', '.tar', '.gz', '.bz2'
}
AUDIO_EXTENSIONS = {
    '.mp3', '.wav', '.aac', '.flac', '.ogg', '.wma', '.m4a', '.opus',
    '.aiff', '.ape', '.mid', '.midi'
}

# --- Text Processing Constants ---
MAX_FILENAME_COMPONENT_LENGTH = 150

# Words to ignore when creating folder names from titles
FOLDER_NAME_STOP_WORDS = {
    "a", "alone", "am", "an", "and", "at", "be", "by", "com",
    "for", "he", "her", "his", "i", "im", "in", "is", "it", "its",
    "me", "my", "net", "not", "of", "on", "or", "org", "our",
    "s", "she", "so", "the", "their", "they", "this",
    "to", "ve", "was", "we", "were", "with", "www", "you", "your",
}

# Additional words to ignore specifically for creator-level downloads
CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS = {
    "poll", "cover", "fan-art", "fanart", "requests", "request", "holiday",
    "batch", "open", "closed", "winner", "loser", "wip",
    "update", "news", "discussion", "question", "stream", "video", "sketchbook",
    # Months and days
    "jan", "january", "feb", "february", "mar", "march", "apr", "april",
    "may", "jun", "june", "jul", "july", "aug", "august", "sep", "september",
    "oct", "october", "nov", "november", "dec", "december",
    "mon", "monday", "tue", "tuesday", "wed", "wednesday", "thu", "thursday",
    "fri", "friday", "sat", "saturday", "sun", "sunday"
}
|
||||
1
src/core/__init__.py
Normal file
1
src/core/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
321
src/core/api_client.py
Normal file
321
src/core/api_client.py
Normal file
@ -0,0 +1,321 @@
|
||||
# --- Standard Library Imports ---
|
||||
import time
|
||||
import traceback
|
||||
from urllib.parse import urlparse
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
import requests
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ..utils.network_utils import extract_post_info, prepare_cookies_for_request
|
||||
from ..config.constants import (
|
||||
STYLE_DATE_POST_TITLE
|
||||
)
|
||||
|
||||
|
||||
def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
    """
    Fetches a single page of posts from the API with retry logic.

    Args:
        api_url_base (str): The base URL for the user's posts.
        headers (dict): The request headers.
        offset (int): The offset for pagination.
        logger (callable): Function to log messages.
        cancellation_event (threading.Event): Event to signal cancellation.
        pause_event (threading.Event): Event to signal pause.
        cookies_dict (dict): A dictionary of cookies to include in the request.

    Returns:
        list: A list of post data dictionaries from the API.

    Raises:
        RuntimeError: If the fetch fails after all retries or encounters a non-retryable error.
    """
    # Honour cancellation before any network traffic.
    if cancellation_event and cancellation_event.is_set():
        logger(" Fetch cancelled before request.")
        raise RuntimeError("Fetch operation cancelled by user.")
    # Poll every 0.5s while paused; cancellation still wins during a pause.
    if pause_event and pause_event.is_set():
        logger(" Post fetching paused...")
        while pause_event.is_set():
            if cancellation_event and cancellation_event.is_set():
                logger(" Post fetching cancelled while paused.")
                raise RuntimeError("Fetch operation cancelled by user.")
            time.sleep(0.5)
        logger(" Post fetching resumed.")

    # The API paginates with an `o` (offset) query parameter, 50 posts/page.
    paginated_url = f'{api_url_base}?o={offset}'
    max_retries = 3
    retry_delay = 5  # base delay in seconds; grows exponentially per attempt

    for attempt in range(max_retries):
        if cancellation_event and cancellation_event.is_set():
            raise RuntimeError("Fetch operation cancelled by user during retry loop.")

        log_message = f" Fetching: {paginated_url} (Page approx. {offset // 50 + 1})"
        if attempt > 0:
            log_message += f" (Attempt {attempt + 1}/{max_retries})"
        logger(log_message)

        try:
            # timeout is (connect=15s, read=90s).
            response = requests.get(paginated_url, headers=headers, timeout=(15, 90), cookies=cookies_dict)
            response.raise_for_status()

            # Non-JSON bodies (e.g. an HTML error page) are treated as an
            # empty page rather than an error.
            if 'application/json' not in response.headers.get('Content-Type', '').lower():
                logger(f"⚠️ Unexpected content type from API: {response.headers.get('Content-Type')}. Body: {response.text[:200]}")
                return []

            return response.json()

        except (requests.exceptions.Timeout, requests.exceptions.ConnectionError) as e:
            # Transient network failures: retry with exponential backoff
            # (5s, 10s, ... = retry_delay * 2**attempt).
            logger(f" ⚠️ Retryable network error on page fetch (Attempt {attempt + 1}): {e}")
            if attempt < max_retries - 1:
                delay = retry_delay * (2 ** attempt)
                logger(f" Retrying in {delay} seconds...")
                time.sleep(delay)
                continue
            else:
                logger(f" ❌ Failed to fetch page after {max_retries} attempts.")
                raise RuntimeError(f"Timeout or connection error fetching offset {offset}")
        except requests.exceptions.RequestException as e:
            # Non-retryable HTTP errors (4xx/5xx from raise_for_status, etc.).
            err_msg = f"Error fetching offset {offset}: {e}"
            if e.response is not None:
                err_msg += f" (Status: {e.response.status_code}, Body: {e.response.text[:200]})"
            raise RuntimeError(err_msg)
        except ValueError as e:  # JSON decode error
            # Only reachable after a response was received, so `response`
            # is guaranteed to be bound here.
            raise RuntimeError(f"Error decoding JSON from offset {offset}: {e}. Response: {response.text[:200]}")

    # Defensive: every loop path above either returns or raises.
    raise RuntimeError(f"Failed to fetch page {paginated_url} after all attempts.")
|
||||
|
||||
|
||||
def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
    """Fetches all comments for a specific post.

    Returns the decoded JSON comment list; raises RuntimeError on
    cancellation, on any HTTP/network failure, or when the response body
    is not valid JSON.
    """
    if cancellation_event and cancellation_event.is_set():
        raise RuntimeError("Comment fetch operation cancelled by user.")

    comments_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{post_id}/comments"
    logger(f" Fetching comments: {comments_api_url}")

    try:
        resp = requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict)
        resp.raise_for_status()
    except requests.exceptions.RequestException as exc:
        raise RuntimeError(f"Error fetching comments for post {post_id}: {exc}")
    try:
        return resp.json()
    except ValueError as exc:
        raise RuntimeError(f"Error decoding JSON from comments API for post {post_id}: {exc}")
|
||||
|
||||
def download_from_api(
    api_url_input,
    logger=print,
    start_page=None,
    end_page=None,
    manga_mode=False,
    cancellation_event=None,
    pause_event=None,
    use_cookie=False,
    cookie_text="",
    selected_cookie_file=None,
    app_base_dir=None,
    manga_filename_style_for_sort_check=None
):
    """
    Generator that yields batches (lists) of post dicts for a creator feed,
    or a single-element batch for a targeted post URL.

    Behaviour by input:
      * Post URL: tries a direct single-post API fetch first; on failure,
        falls back to scanning pages for the post id (page range ignored).
      * Creator URL, manga mode with a non-date style: fetches ALL pages,
        sorts oldest-first by 'published' (then post id), then yields in
        page_size chunks.
      * Otherwise: yields each API page as-is (newest first).

    Args:
        api_url_input (str): kemono/coomer post or creator URL.
        logger (callable): Logging callback.
        start_page / end_page (int|None): 1-based page window for creator feeds.
        manga_mode (bool): Enables the fetch-all-and-sort behaviour above.
        cancellation_event / pause_event (threading.Event|None): Cooperative
            cancel/pause signals, polled between requests.
        use_cookie (bool), cookie_text (str), selected_cookie_file, app_base_dir:
            Inputs forwarded to prepare_cookies_for_request.
        manga_filename_style_for_sort_check: Style constant deciding the
            manga-mode sort strategy.

    Yields:
        list[dict]: Batches of post objects.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0',
        'Accept': 'application/json'
    }

    service, user_id, target_post_id = extract_post_info(api_url_input)

    if cancellation_event and cancellation_event.is_set():
        logger(" Download_from_api cancelled at start.")
        return

    # Derive the API host from the input URL; unknown hosts fall back to
    # kemono.su rather than failing outright.
    parsed_input_url_for_domain = urlparse(api_url_input)
    api_domain = parsed_input_url_for_domain.netloc
    if not any(d in api_domain.lower() for d in ['kemono.su', 'kemono.party', 'coomer.su', 'coomer.party']):
        logger(f"⚠️ Unrecognized domain '{api_domain}' from input URL. Defaulting to kemono.su for API calls.")
        api_domain = "kemono.su"
    cookies_for_api = None
    if use_cookie and app_base_dir:
        cookies_for_api = prepare_cookies_for_request(use_cookie, cookie_text, selected_cookie_file, app_base_dir, logger, target_domain=api_domain)
    if target_post_id:
        # Fast path: fetch the single post directly; any failure falls
        # through to the paginated scan below.
        direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
        logger(f" Attempting direct fetch for target post: {direct_post_api_url}")
        try:
            direct_response = requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api)
            direct_response.raise_for_status()
            direct_post_data = direct_response.json()
            # Unwrap the two response shapes seen in the wild: a one-element
            # list, and a {'post': {...}} envelope.
            if isinstance(direct_post_data, list) and direct_post_data:
                direct_post_data = direct_post_data[0]
            if isinstance(direct_post_data, dict) and 'post' in direct_post_data and isinstance(direct_post_data['post'], dict):
                direct_post_data = direct_post_data['post']
            if isinstance(direct_post_data, dict) and direct_post_data.get('id') == target_post_id:
                logger(f" ✅ Direct fetch successful for post {target_post_id}.")
                yield [direct_post_data]
                return
            else:
                response_type = type(direct_post_data).__name__
                response_snippet = str(direct_post_data)[:200]
                logger(f" ⚠️ Direct fetch for post {target_post_id} returned unexpected data (Type: {response_type}, Snippet: '{response_snippet}'). Falling back to pagination.")
        except requests.exceptions.RequestException as e:
            logger(f" ⚠️ Direct fetch failed for post {target_post_id}: {e}. Falling back to pagination.")
        except Exception as e:
            logger(f" ⚠️ Unexpected error during direct fetch for post {target_post_id}: {e}. Falling back to pagination.")
    if not service or not user_id:
        logger(f"❌ Invalid URL or could not extract service/user: {api_url_input}")
        return
    if target_post_id and (start_page or end_page):
        logger("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")

    # Manga mode with any style other than date_post_title: gather every
    # page first so posts can be re-sorted oldest-first.
    is_manga_mode_fetch_all_and_sort_oldest_first = manga_mode and (manga_filename_style_for_sort_check != STYLE_DATE_POST_TITLE) and not target_post_id
    api_base_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}"
    page_size = 50
    if is_manga_mode_fetch_all_and_sort_oldest_first:
        logger(f" Manga Mode (Style: {manga_filename_style_for_sort_check if manga_filename_style_for_sort_check else 'Default'} - Oldest First Sort Active): Fetching all posts to sort by date...")
        all_posts_for_manga_mode = []
        current_offset_manga = 0
        if start_page and start_page > 1:
            current_offset_manga = (start_page - 1) * page_size
            logger(f" Manga Mode: Starting fetch from page {start_page} (offset {current_offset_manga}).")
        elif start_page:
            logger(f" Manga Mode: Starting fetch from page 1 (offset 0).")
        if end_page:
            logger(f" Manga Mode: Will fetch up to page {end_page}.")
        while True:
            # Pause handling: poll until resumed or cancelled.
            if pause_event and pause_event.is_set():
                logger(" Manga mode post fetching paused...")
                while pause_event.is_set():
                    if cancellation_event and cancellation_event.is_set():
                        logger(" Manga mode post fetching cancelled while paused.")
                        break
                    time.sleep(0.5)
                if not (cancellation_event and cancellation_event.is_set()): logger(" Manga mode post fetching resumed.")
            if cancellation_event and cancellation_event.is_set():
                logger(" Manga mode post fetching cancelled.")
                break
            current_page_num_manga = (current_offset_manga // page_size) + 1
            if end_page and current_page_num_manga > end_page:
                logger(f" Manga Mode: Reached specified end page ({end_page}). Stopping post fetch.")
                break
            try:
                posts_batch_manga = fetch_posts_paginated(api_base_url, headers, current_offset_manga, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
                if not isinstance(posts_batch_manga, list):
                    logger(f"❌ API Error (Manga Mode): Expected list of posts, got {type(posts_batch_manga)}.")
                    break
                if not posts_batch_manga:
                    # Empty page = end of feed; report why nothing was found
                    # when a page window was requested.
                    logger("✅ Reached end of posts (Manga Mode fetch all).")
                    if start_page and not end_page and current_page_num_manga < start_page:
                        logger(f" Manga Mode: No posts found on or after specified start page {start_page}.")
                    elif end_page and current_page_num_manga <= end_page and not all_posts_for_manga_mode:
                        logger(f" Manga Mode: No posts found within the specified page range ({start_page or 1}-{end_page}).")
                    break
                all_posts_for_manga_mode.extend(posts_batch_manga)
                current_offset_manga += page_size
                time.sleep(0.6)  # polite inter-page delay
            except RuntimeError as e:
                if "cancelled by user" in str(e).lower():
                    logger(f"ℹ️ Manga mode pagination stopped due to cancellation: {e}")
                else:
                    logger(f"❌ {e}\n Aborting manga mode pagination.")
                break
            except Exception as e:
                logger(f"❌ Unexpected error during manga mode fetch: {e}")
                traceback.print_exc()
                break
        if cancellation_event and cancellation_event.is_set(): return
        if all_posts_for_manga_mode:
            logger(f" Manga Mode: Fetched {len(all_posts_for_manga_mode)} total posts. Sorting by publication date (oldest first)...")
            def sort_key_tuple(post):
                # Sort key: ('published' or 'added' ISO date string, numeric
                # post id). Missing dates sort to the very start.
                published_date_str = post.get('published')
                added_date_str = post.get('added')
                post_id_str = post.get('id', "0")
                primary_sort_val = "0000-00-00T00:00:00"
                if published_date_str:
                    primary_sort_val = published_date_str
                elif added_date_str:
                    logger(f" ⚠️ Post ID {post_id_str} missing 'published' date, using 'added' date '{added_date_str}' for primary sorting.")
                    primary_sort_val = added_date_str
                else:
                    logger(f" ⚠️ Post ID {post_id_str} missing both 'published' and 'added' dates. Placing at start of sort (using default earliest date).")
                secondary_sort_val = 0
                try:
                    secondary_sort_val = int(post_id_str)
                except ValueError:
                    logger(f" ⚠️ Post ID '{post_id_str}' is not a valid integer for secondary sorting, using 0.")
                return (primary_sort_val, secondary_sort_val)
            all_posts_for_manga_mode.sort(key=sort_key_tuple)
            # Re-chunk the sorted list so downstream consumers still see
            # page-sized batches.
            for i in range(0, len(all_posts_for_manga_mode), page_size):
                if cancellation_event and cancellation_event.is_set():
                    logger(" Manga mode post yielding cancelled.")
                    break
                yield all_posts_for_manga_mode[i:i + page_size]
        return

    # Non-sorting manga mode (date_post_title style) streams pages in the
    # API's default newest-first order.
    if manga_mode and not target_post_id and (manga_filename_style_for_sort_check == STYLE_DATE_POST_TITLE):
        logger(f" Manga Mode (Style: {STYLE_DATE_POST_TITLE}): Processing posts in default API order (newest first).")

    current_page_num = 1
    current_offset = 0
    processed_target_post_flag = False  # set once the targeted post was yielded
    if start_page and start_page > 1 and not target_post_id:
        current_offset = (start_page - 1) * page_size
        current_page_num = start_page
        logger(f" Starting from page {current_page_num} (calculated offset {current_offset}).")
    while True:
        # Pause handling: poll until resumed or cancelled.
        if pause_event and pause_event.is_set():
            logger(" Post fetching loop paused...")
            while pause_event.is_set():
                if cancellation_event and cancellation_event.is_set():
                    logger(" Post fetching loop cancelled while paused.")
                    break
                time.sleep(0.5)
            if not (cancellation_event and cancellation_event.is_set()): logger(" Post fetching loop resumed.")
        if cancellation_event and cancellation_event.is_set():
            logger(" Post fetching loop cancelled.")
            break
        if target_post_id and processed_target_post_flag:
            break
        # The end-page bound applies only to creator feeds, not post searches.
        if not target_post_id and end_page and current_page_num > end_page:
            logger(f"✅ Reached specified end page ({end_page}) for creator feed. Stopping.")
            break
        try:
            posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
            if not isinstance(posts_batch, list):
                logger(f"❌ API Error: Expected list of posts, got {type(posts_batch)} at page {current_page_num} (offset {current_offset}).")
                break
        except RuntimeError as e:
            if "cancelled by user" in str(e).lower():
                logger(f"ℹ️ Pagination stopped due to cancellation: {e}")
            else:
                logger(f"❌ {e}\n Aborting pagination at page {current_page_num} (offset {current_offset}).")
            break
        except Exception as e:
            logger(f"❌ Unexpected error fetching page {current_page_num} (offset {current_offset}): {e}")
            traceback.print_exc()
            break
        if not posts_batch:
            # Empty page = end of feed; message depends on what was sought.
            if target_post_id and not processed_target_post_flag:
                logger(f"❌ Target post {target_post_id} not found after checking all available pages (API returned no more posts at offset {current_offset}).")
            elif not target_post_id:
                if current_page_num == (start_page or 1):
                    logger(f"😕 No posts found on the first page checked (page {current_page_num}, offset {current_offset}).")
                else:
                    logger(f"✅ Reached end of posts (no more content from API at offset {current_offset}).")
            break
        if target_post_id and not processed_target_post_flag:
            # Scan this page for the requested post id (string-compared,
            # since id types vary between services).
            matching_post = next((p for p in posts_batch if str(p.get('id')) == str(target_post_id)), None)
            if matching_post:
                logger(f"🎯 Found target post {target_post_id} on page {current_page_num} (offset {current_offset}).")
                yield [matching_post]
                processed_target_post_flag = True
        elif not target_post_id:
            yield posts_batch
        if processed_target_post_flag:
            break
        current_offset += page_size
        current_page_num += 1
        time.sleep(0.6)  # polite inter-page delay
    if target_post_id and not processed_target_post_flag and not (cancellation_event and cancellation_event.is_set()):
        logger(f"❌ Target post {target_post_id} could not be found after checking all relevant pages (final check after loop).")
|
||||
241
src/core/manager.py
Normal file
241
src/core/manager.py
Normal file
@ -0,0 +1,241 @@
|
||||
# --- Standard Library Imports ---
|
||||
import threading
|
||||
import time
|
||||
import os
|
||||
import json
|
||||
import traceback
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed, Future
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# These imports reflect the new, organized project structure.
|
||||
from .api_client import download_from_api
|
||||
from .workers import PostProcessorWorker, DownloadThread
|
||||
from ..config.constants import (
|
||||
STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING,
|
||||
MAX_THREADS, POST_WORKER_BATCH_THRESHOLD, POST_WORKER_NUM_BATCHES,
|
||||
POST_WORKER_BATCH_DELAY_SECONDS
|
||||
)
|
||||
from ..utils.file_utils import clean_folder_name
|
||||
|
||||
|
||||
class DownloadManager:
    """
    Manages the entire download lifecycle, acting as a bridge between the UI
    and the backend workers.

    It owns the worker thread pool, submits one task per post, aggregates
    per-post results into session totals, and reports progress back to the
    UI through a thread-safe queue.
    """

    def __init__(self, progress_queue):
        """
        Initializes the DownloadManager.

        Args:
            progress_queue (queue.Queue): A thread-safe queue for sending
                                          status updates to the UI.
        """
        self.progress_queue = progress_queue
        self.thread_pool = None
        self.active_futures = []

        # --- Session State ---
        self.cancellation_event = threading.Event()
        self.pause_event = threading.Event()
        self.is_running = False

        # Guards the shared counters below; they are mutated from the pool's
        # callback threads. NOTE: this must be a single instance-level lock --
        # constructing a new threading.Lock() at each use site (as the code
        # previously did) provides no mutual exclusion at all.
        self._counters_lock = threading.Lock()

        self.total_posts = 0
        self.processed_posts = 0
        self.total_downloads = 0
        self.total_skips = 0
        self.all_kept_original_filenames = []

    def _log(self, message):
        """Puts a progress message into the queue for the UI."""
        self.progress_queue.put({'type': 'progress', 'payload': (message,)})

    def start_session(self, config, restore_data=None):
        """
        Starts a new download session based on the provided configuration.
        This is the main entry point called by the UI.

        Args:
            config (dict): A dictionary containing all settings from the UI.
            restore_data (dict, optional): Data from a previous, interrupted session.
        """
        if self.is_running:
            self._log("❌ Cannot start a new session: A session is already in progress.")
            return

        # --- Reset state for the new session ---
        self.is_running = True
        self.cancellation_event.clear()
        self.pause_event.clear()
        self.active_futures.clear()
        self.total_posts = 0
        self.processed_posts = 0
        self.total_downloads = 0
        self.total_skips = 0
        self.all_kept_original_filenames = []

        # --- Decide execution strategy (multi-threaded vs. single-threaded) ---
        is_single_post = bool(config.get('target_post_id_from_initial_url'))
        use_multithreading = config.get('use_multithreading', True)
        # Sequential manga filename styles depend on processing order, so
        # they cannot be fanned out across a pool.
        is_manga_sequential = config.get('manga_mode_active') and config.get('manga_filename_style') in [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]

        should_use_multithreading_for_posts = use_multithreading and not is_single_post and not is_manga_sequential

        if should_use_multithreading_for_posts:
            # Start a separate thread to manage fetching and queuing to the
            # thread pool, so this call returns to the UI immediately.
            fetcher_thread = threading.Thread(
                target=self._fetch_and_queue_posts_for_pool,
                args=(config, restore_data),
                daemon=True
            )
            fetcher_thread.start()
        else:
            # For single posts or sequential manga mode, use a single worker
            # thread which is simpler and ensures order.
            self._start_single_threaded_session(config)

    def _start_single_threaded_session(self, config):
        """Handles downloads that are best processed by a single worker thread."""
        self._log("ℹ️ Initializing single-threaded download process...")

        # DownloadThread is a plain worker object (not a QThread); its
        # blocking `run` method executes on a standard daemon thread.
        self.worker_thread = threading.Thread(
            target=self._run_single_worker,
            args=(config,),
            daemon=True
        )
        self.worker_thread.start()

    def _run_single_worker(self, config):
        """Target function for the single-worker thread."""
        try:
            # Pass the queue directly to the worker for it to send updates.
            worker = DownloadThread(config, self.progress_queue)
            worker.run()  # This is the main blocking call for this thread
        except Exception as e:
            self._log(f"❌ CRITICAL ERROR in single-worker thread: {e}")
            self._log(traceback.format_exc())
        finally:
            self.is_running = False

    def _fetch_and_queue_posts_for_pool(self, config, restore_data):
        """
        Fetches all posts from the API and submits them as tasks to a thread pool.
        This method runs in its own dedicated thread to avoid blocking.
        """
        try:
            num_workers = min(config.get('num_threads', 4), MAX_THREADS)
            self.thread_pool = ThreadPoolExecutor(max_workers=num_workers, thread_name_prefix='PostWorker_')

            if restore_data:
                # Resuming: skip posts already processed by the interrupted run.
                all_posts = restore_data['all_posts_data']
                processed_ids = set(restore_data['processed_post_ids'])
                posts_to_process = [p for p in all_posts if p.get('id') not in processed_ids]
                self.total_posts = len(all_posts)
                self.processed_posts = len(processed_ids)
                self._log(f"🔄 Restoring session. {len(posts_to_process)} posts remaining.")
            else:
                posts_to_process = self._get_all_posts(config)
                self.total_posts = len(posts_to_process)
                self.processed_posts = 0

            self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})

            if not posts_to_process:
                self._log("✅ No new posts to process.")
                return

            # Submit one task per post to the pool.
            for post_data in posts_to_process:
                if self.cancellation_event.is_set():
                    break
                # Each PostProcessorWorker gets the queue to send its own updates.
                worker = PostProcessorWorker(post_data, config, self.progress_queue)
                future = self.thread_pool.submit(worker.process)
                future.add_done_callback(self._handle_future_result)
                self.active_futures.append(future)

        except Exception as e:
            self._log(f"❌ CRITICAL ERROR in post fetcher thread: {e}")
            self._log(traceback.format_exc())
        finally:
            # Wait for all submitted tasks to complete before shutting down.
            if self.thread_pool:
                self.thread_pool.shutdown(wait=True)
            self.is_running = False
            self._log("🏁 All processing tasks have completed.")
            # Emit final signal with the aggregated session totals.
            self.progress_queue.put({
                'type': 'finished',
                'payload': (self.total_downloads, self.total_skips, self.cancellation_event.is_set(), self.all_kept_original_filenames)
            })

    def _get_all_posts(self, config):
        """Helper to fetch all posts using the API client."""
        all_posts = []
        # This generator yields batches of posts.
        post_generator = download_from_api(
            api_url_input=config['api_url'],
            logger=self._log,
            # ... pass other relevant config keys ...
            cancellation_event=self.cancellation_event,
            pause_event=self.pause_event
        )
        for batch in post_generator:
            all_posts.extend(batch)
        return all_posts

    def _handle_future_result(self, future: Future):
        """
        Callback executed (on a pool thread) when a worker task completes.

        Aggregates the worker's result tuple into the session totals and
        forwards any failure/history payloads to the UI queue.
        """
        if self.cancellation_event.is_set():
            return

        # All counter mutations happen under the shared instance lock so
        # concurrent callbacks cannot interleave their read-modify-writes.
        # (Previously a fresh Lock was created here per call, which
        # protected nothing.)
        with self._counters_lock:
            self.processed_posts += 1
            try:
                if future.cancelled():
                    self._log("⚠️ A post processing task was cancelled.")
                    self.total_skips += 1
                else:
                    result = future.result()
                    # Unpack result tuple from the worker.
                    (dl_count, skip_count, kept_originals,
                     retryable, permanent, history) = result
                    self.total_downloads += dl_count
                    self.total_skips += skip_count
                    self.all_kept_original_filenames.extend(kept_originals)

                    # Queue up results for UI to handle.
                    if retryable:
                        self.progress_queue.put({'type': 'retryable_failure', 'payload': (retryable,)})
                    if permanent:
                        self.progress_queue.put({'type': 'permanent_failure', 'payload': (permanent,)})
                    if history:
                        self.progress_queue.put({'type': 'post_processed_history', 'payload': (history,)})

            except Exception as e:
                self._log(f"❌ Worker task resulted in an exception: {e}")
                self.total_skips += 1  # Count errored posts as skipped

        # Update overall progress (outside the lock; the queue is thread-safe).
        self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})

    def cancel_session(self):
        """Cancels the current running session."""
        if not self.is_running:
            return
        self._log("⚠️ Cancellation requested by user...")
        self.cancellation_event.set()

        # For single-thread mode the worker polls the event itself; for
        # multi-thread mode, tear the pool down without waiting.
        if self.thread_pool:
            # Don't wait, just cancel pending futures and let the fetcher thread exit.
            self.thread_pool.shutdown(wait=False, cancel_futures=True)

        self.is_running = False
|
||||
@ -1,813 +1,46 @@
|
||||
import os
|
||||
import time
|
||||
import requests
|
||||
import re
|
||||
import threading
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import queue
|
||||
import re
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import uuid
|
||||
import http
|
||||
import html
|
||||
import json
|
||||
import queue
|
||||
import hashlib
|
||||
import http .client
|
||||
import traceback
|
||||
from concurrent .futures import ThreadPoolExecutor ,Future ,CancelledError ,as_completed
|
||||
from collections import deque
|
||||
import html
|
||||
from PyQt5 .QtCore import QObject ,pyqtSignal ,QThread ,QMutex ,QMutexLocker
|
||||
from collections import deque
|
||||
import hashlib
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed, CancelledError, Future
|
||||
from io import BytesIO
|
||||
from urllib .parse import urlparse
|
||||
import uuid
|
||||
try :
|
||||
from mega import Mega
|
||||
import requests
|
||||
# --- Third-Party Library Imports ---
|
||||
try:
|
||||
from PIL import Image
|
||||
except ImportError:
|
||||
Image = None
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5 .QtCore import Qt ,QThread ,pyqtSignal ,QMutex ,QMutexLocker ,QObject ,QTimer ,QSettings ,QStandardPaths ,QCoreApplication ,QUrl ,QSize ,QProcess
|
||||
# --- Local Application Imports ---
|
||||
from .api_client import download_from_api, fetch_post_comments
|
||||
from ..services.multipart_downloader import download_file_in_parts, MULTIPART_DOWNLOADER_AVAILABLE
|
||||
from ..services.drive_downloader import (
|
||||
download_mega_file, download_gdrive_file, download_dropbox_file
|
||||
)
|
||||
# Corrected Imports:
|
||||
from ..utils.file_utils import (
|
||||
is_image, is_video, is_zip, is_rar, is_archive, is_audio, KNOWN_NAMES,
|
||||
clean_filename, clean_folder_name
|
||||
)
|
||||
from ..utils.network_utils import prepare_cookies_for_request, get_link_platform
|
||||
from ..utils.text_utils import (
|
||||
is_title_match_for_character, is_filename_match_for_character, strip_html_tags,
|
||||
extract_folder_name_from_title, # This was the function causing the error
|
||||
match_folders_from_title, match_folders_from_filename_enhanced
|
||||
)
|
||||
from ..config.constants import *
|
||||
|
||||
|
||||
try :
|
||||
from drive import download_mega_file as drive_download_mega_file ,download_gdrive_file ,download_dropbox_file
|
||||
|
||||
|
||||
|
||||
except ImportError as drive_import_err :
|
||||
print (f"ERROR importing from drive.py: {drive_import_err }. External drive downloads will fail.")
|
||||
except ImportError :
|
||||
print ("ERROR: mega.py library not found. Please install it: pip install mega.py")
|
||||
try :
|
||||
from PIL import Image
|
||||
except ImportError :
|
||||
print ("ERROR: Pillow library not found. Please install it: pip install Pillow")
|
||||
Image =None
|
||||
try :
|
||||
from multipart_downloader import download_file_in_parts
|
||||
MULTIPART_DOWNLOADER_AVAILABLE =True
|
||||
except ImportError as e :
|
||||
print (f"Warning: multipart_downloader.py not found or import error: {e }. Multi-part downloads will be disabled.")
|
||||
MULTIPART_DOWNLOADER_AVAILABLE =False
|
||||
def download_file_in_parts (*args ,**kwargs ):return False ,0 ,None ,None
|
||||
from io import BytesIO
|
||||
# ---------------------------------------------------------------------------
# Filename-style identifiers used by the manga/sequential naming modes.
# ---------------------------------------------------------------------------
STYLE_POST_TITLE = "post_title"
STYLE_ORIGINAL_NAME = "original_name"
STYLE_DATE_BASED = "date_based"
STYLE_DATE_POST_TITLE = "date_post_title"
STYLE_POST_TITLE_GLOBAL_NUMBERING = "post_title_global_numbering"
MANGA_DATE_PREFIX_DEFAULT = ""

# Scopes for the "skip words" filter.
SKIP_SCOPE_FILES = "files"
SKIP_SCOPE_POSTS = "posts"
SKIP_SCOPE_BOTH = "both"

# Scopes for character-name matching.
CHAR_SCOPE_TITLE = "title"
CHAR_SCOPE_FILES = "files"
CHAR_SCOPE_BOTH = "both"
CHAR_SCOPE_COMMENTS = "comments"

# Per-file download outcome markers.
FILE_DOWNLOAD_STATUS_SUCCESS = "success"
FILE_DOWNLOAD_STATUS_SKIPPED = "skipped"
FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER = "failed_retry_later"
FILE_DOWNLOAD_STATUS_FAILED_PERMANENTLY_THIS_SESSION = "failed_permanent_session"

# Mutable module-level state.
fastapi_app = None
KNOWN_NAMES = []
GOFILE_GUEST_TOKEN = None

# Multi-part download tuning.
MIN_SIZE_FOR_MULTIPART_DOWNLOAD = 10 * 1024 * 1024  # 10 MiB
MAX_PARTS_FOR_MULTIPART_DOWNLOAD = 15
MAX_FILENAME_COMPONENT_LENGTH = 150

# Recognised file extensions, grouped by media type (all lower-case).
IMAGE_EXTENSIONS = {
    '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.tiff', '.tif', '.webp',
    '.heic', '.heif', '.svg', '.ico', '.jfif', '.pjpeg', '.pjp', '.avif',
}
VIDEO_EXTENSIONS = {
    '.mp4', '.mov', '.mkv', '.webm', '.avi', '.wmv', '.flv', '.mpeg',
    '.mpg', '.m4v', '.3gp', '.ogv', '.ts', '.vob',
}
ARCHIVE_EXTENSIONS = {'.zip', '.rar', '.7z', '.tar', '.gz', '.bz2'}
AUDIO_EXTENSIONS = {
    '.mp3', '.wav', '.aac', '.flac', '.ogg', '.wma', '.m4a', '.opus',
    '.aiff', '.ape', '.mid', '.midi',
}

# Words dropped when deriving a folder name from free text.
FOLDER_NAME_STOP_WORDS = {
    "a", "alone", "am", "an", "and", "at", "be", "blues", "but", "by", "com",
    "for", "grown", "hard", "he", "her", "his", "hitting", "i", "im", "in", "is", "it", "its",
    "me", "much", "my", "net", "not", "of", "on", "or", "org", "our", "please",
    "right", "s", "she", "so", "technically", "tell", "the", "their", "they", "this",
    "to", "ve", "was", "we", "well", "were", "with", "www", "year", "you", "your",
}

# Folder-name candidates ignored when auto-naming creator downloads
# (generic post words, bare numbers, number words, month and weekday names).
CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS = {
    "poll", "cover", "fan-art", "fanart", "requests", "request", "holiday", "suggest", "suggestions",
    "batch", "open", "closed", "winner", "loser", "minor", "adult", "wip",
    "update", "news", "discussion", "question", "stream", "video", "sketchbook", "artwork",

    "1", "2", "3", "4", "5", "6", "7", "8", "9", "10",
    "11", "12", "13", "14", "15", "16", "17", "18", "19", "20",
    "one", "two", "three", "four", "five", "six", "seven", "eight", "nine", "ten",
    "eleven", "twelve", "thirteen", "fourteen", "fifteen", "sixteen", "seventeen",
    "eighteen", "nineteen", "twenty",

    "jan", "january", "feb", "february", "mar", "march", "apr", "april",
    "may", "jun", "june", "jul", "july", "aug", "august", "sep", "september",
    "oct", "october", "nov", "november", "dec", "december",

    "mon", "monday", "tue", "tuesday", "wed", "wednesday", "thu", "thursday",
    "fri", "friday", "sat", "saturday", "sun", "sunday",
}

# Noise phrases scrubbed from titles before Known.txt name matching.
KNOWN_TXT_MATCH_CLEANUP_PATTERNS = [
    r'\bcum\b',
    r'\bnsfw\b',
    r'\bsfw\b',
    r'\bweb\b',
    r'\bhd\b',
    r'\bhi\s*res\b',
    r'\bhigh\s*res\b',
    r'\b\d+p\b',
    r'\b\d+k\b',
    r'\[OC\]',
    r'\[Request(?:s)?\]',
    r'\bCommission\b',
    r'\bComm\b',
    r'\bPreview\b',
]
|
||||
|
||||
def parse_cookie_string(cookie_string):
    """Parse a 'name=value; name2=value2' cookie string into a dict.

    Pairs without an '=' or with an empty name are ignored. Returns None
    when no valid pairs are found (or the input is empty).
    """
    if not cookie_string:
        return None
    jar = {}
    for chunk in cookie_string.split(';'):
        key, sep, raw_value = chunk.partition('=')
        key = key.strip()
        if sep and key:
            jar[key] = raw_value.strip()
    return jar or None
|
||||
def load_cookies_from_netscape_file(filepath, logger_func, target_domain_filter=None):
    """Loads cookies from a Netscape-formatted cookies.txt file.

    If target_domain_filter is provided, only cookies for that domain (or its
    subdomains) are returned. Returns {name: value} or None on failure / no
    matching cookies.
    """
    collected = {}
    matched_count = 0
    scanned_count = 0
    try:
        with open(filepath, 'r', encoding='utf-8') as handle:
            for raw_line in handle:
                entry = raw_line.strip()
                # Blank lines and '#' comments are not counted as cookies.
                if not entry or entry.startswith('#'):
                    continue
                fields = entry.split('\t')
                scanned_count += 1
                # Netscape format: domain, flag, path, secure, expiry, name, value.
                if len(fields) != 7:
                    continue
                cookie_domain, cookie_name, cookie_value = fields[0], fields[5], fields[6]
                if not cookie_name:
                    continue
                if not target_domain_filter:
                    collected[cookie_name] = cookie_value
                    continue
                host = target_domain_filter.lower()
                domain_norm = cookie_domain.lower()
                if domain_norm.startswith('.'):
                    # Leading-dot domains match the bare host and any subdomain.
                    domain_ok = host == domain_norm[1:] or host.endswith(domain_norm)
                else:
                    # Exact-host cookie: only an exact match counts.
                    domain_ok = host == domain_norm
                if domain_ok:
                    collected[cookie_name] = cookie_value
                    matched_count += 1
        if target_domain_filter:
            logger_func(f"  🍪 Scanned {scanned_count} cookies in '{os.path.basename(filepath)}'. Loaded {matched_count} for domain '{target_domain_filter}'.")
        else:
            logger_func(f"  🍪 Loaded {len(collected)} cookies from '{os.path.basename(filepath)}' (no domain filter).")
        return collected if collected else None
    except FileNotFoundError:
        logger_func(f"  🍪 Cookie file '{os.path.basename(filepath)}' not found at expected location.")
        return None
    except Exception as e:
        logger_func(f"  🍪 Error parsing cookie file '{os.path.basename(filepath)}': {e}")
        return None
|
||||
def is_title_match_for_character (post_title ,character_name_filter ):
|
||||
if not post_title or not character_name_filter :
|
||||
return False
|
||||
safe_filter =str (character_name_filter ).strip ()
|
||||
if not safe_filter :
|
||||
return False
|
||||
pattern =r"(?i)\b"+re .escape (safe_filter )+r"\b"
|
||||
match_result =bool (re .search (pattern ,post_title ))
|
||||
return match_result
|
||||
def is_filename_match_for_character(filename, character_name_filter):
    """Return True when the filter occurs as a case-insensitive substring
    of the filename."""
    if not filename or not character_name_filter:
        return False
    needle = str(character_name_filter).strip().lower()
    if not needle:
        return False
    return needle in filename.lower()
|
||||
def clean_folder_name(name):
    """Sanitise *name* for use as a folder name.

    Strips disallowed characters, collapses whitespace, drops stop words
    (FOLDER_NAME_STOP_WORDS), truncates to MAX_FILENAME_COMPONENT_LENGTH and
    trims trailing dots/spaces. Falls back to 'untitled_folder' when nothing
    usable remains.
    """
    text = name if isinstance(name, str) else str(name)
    text = re.sub(r'[^\w\s\-\_\.\(\)]', '', text).strip()
    text = re.sub(r'\s+', ' ', text)
    if text:
        kept_words = [w for w in text.split(' ') if w and w.lower() not in FOLDER_NAME_STOP_WORDS]
        text = ' '.join(kept_words).strip()
    if not text:
        return "untitled_folder"
    if len(text) > MAX_FILENAME_COMPONENT_LENGTH:
        text = text[:MAX_FILENAME_COMPONENT_LENGTH]
    # Windows rejects names ending with '.' or ' '; trim them off.
    while text and text[-1] in '. ':
        text = text[:-1]
    return text if text else "untitled_folder"
|
||||
def clean_filename(name):
    """Sanitise *name* for use as a filename, preserving its extension.

    Truncates the base name so that base + extension fits within
    MAX_FILENAME_COMPONENT_LENGTH characters. Falls back to 'untitled_file'
    when nothing usable remains.
    """
    text = name if isinstance(name, str) else str(name)
    text = re.sub(r'[^\w\s\-\_\.\(\)]', '', text).strip()
    text = re.sub(r'\s+', ' ', text)
    if not text:
        return "untitled_file"
    stem, ext = os.path.splitext(text)
    allowed_stem_len = MAX_FILENAME_COMPONENT_LENGTH - len(ext)
    if len(stem) > allowed_stem_len:
        if allowed_stem_len > 0:
            stem = stem[:allowed_stem_len]
        else:
            # The extension alone exceeds the limit; hard-truncate the whole name.
            return text[:MAX_FILENAME_COMPONENT_LENGTH] if text else "untitled_file"
    candidate = stem + ext
    return candidate if candidate else "untitled_file"
|
||||
def strip_html_tags(html_text):
    """Convert an HTML fragment to plain text.

    Unescapes entities, replaces tags with spaces and collapses runs of
    whitespace. Returns '' for falsy input.
    """
    if not html_text:
        return ""
    unescaped = html.unescape(str(html_text))
    without_tags = re.sub(r'<[^>]+>', ' ', unescaped)
    return re.sub(r'\s+', ' ', without_tags).strip()
|
||||
def extract_folder_name_from_title(title, unwanted_keywords):
    """Derive a folder name from a post title.

    Returns the first cleaned token of the lower-cased title that is not in
    *unwanted_keywords*; otherwise falls back to the cleaned full title, and
    finally to 'Uncategorized'.
    """
    if not title:
        return 'Uncategorized'
    for token in re.findall(r'\b[\w\-]+\b', title.lower()):
        candidate = clean_folder_name(token)
        if candidate and candidate.lower() not in unwanted_keywords:
            return candidate
    fallback = clean_folder_name(title)
    return fallback if fallback else 'Uncategorized'
|
||||
def match_folders_from_title(title, names_to_match, unwanted_keywords):
    """
    Matches folder names from a title based on a list of known name objects.

    Each name object in names_to_match is expected to be a dict:
        {'name': 'PrimaryFolderName', 'aliases': ['alias1', 'alias2', ...]}
    An entry matches when any alias appears as a whole word in the
    noise-scrubbed title; its cleaned primary name is collected unless it is
    in *unwanted_keywords*. Returns the sorted list of matched names.
    """
    if not title or not names_to_match:
        return []

    # Scrub noise phrases (resolution tags, '[OC]', etc.) before matching.
    scrubbed = title
    for noise_pattern in KNOWN_TXT_MATCH_CLEANUP_PATTERNS:
        scrubbed = re.sub(noise_pattern, ' ', scrubbed, flags=re.IGNORECASE)
    scrubbed_lower = re.sub(r'\s+', ' ', scrubbed).strip().lower()

    matches = set()
    # Longest primary names first so the most specific entries win ties.
    for entry in sorted(names_to_match, key=lambda e: len(e.get("name", "")), reverse=True):
        primary = entry.get("name")
        aliases = entry.get("aliases", [])
        if not primary or not aliases:
            continue
        for alias in aliases:
            alias_lower = alias.lower()
            if not alias_lower:
                continue
            if re.search(r'\b' + re.escape(alias_lower) + r'\b', scrubbed_lower):
                cleaned_primary = clean_folder_name(primary)
                if cleaned_primary.lower() not in unwanted_keywords:
                    matches.add(cleaned_primary)
                # One alias hit is enough for this entry.
                break
    return sorted(matches)
|
||||
|
||||
def match_folders_from_filename_enhanced(filename, names_to_match, unwanted_keywords):
    """Matches known folder names against a filename by alias prefix.

    Builds an (alias -> cleaned primary name) table from the name objects,
    sorted longest-alias-first, and collects the primaries whose alias is a
    prefix of the lower-cased filename. Returns the sorted list of matches.
    """
    if not filename or not names_to_match:
        return []

    filename_lower = filename.lower()

    # Build the lookup table, skipping unusable or unwanted primaries.
    alias_table = []
    for entry in names_to_match:
        primary = entry.get("name")
        if not primary:
            continue
        cleaned_primary = clean_folder_name(primary)
        if not cleaned_primary or cleaned_primary.lower() in unwanted_keywords:
            continue
        for alias in entry.get("aliases", []):
            alias_lower = alias.lower()
            if alias_lower:
                alias_table.append((alias_lower, cleaned_primary))

    # Longest alias first so the most specific prefix is tried first.
    alias_table.sort(key=lambda pair: len(pair[0]), reverse=True)

    matched = set()
    for alias_lower, primary_name in alias_table:
        if filename_lower.startswith(alias_lower):
            matched.add(primary_name)

    return sorted(matched)
|
||||
|
||||
def is_image(filename):
    """True when the filename's extension is a known image type."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in IMAGE_EXTENSIONS
|
||||
def is_video(filename):
    """True when the filename's extension is a known video type."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in VIDEO_EXTENSIONS
|
||||
def is_zip(filename):
    """True when the filename ends with '.zip' (case-insensitive)."""
    return bool(filename) and filename.lower().endswith('.zip')
|
||||
def is_rar(filename):
    """True when the filename ends with '.rar' (case-insensitive)."""
    return bool(filename) and filename.lower().endswith('.rar')
|
||||
def is_archive(filename):
    """True when the filename's extension is a known archive type."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in ARCHIVE_EXTENSIONS
|
||||
def is_audio(filename):
    """True when the filename's extension is a known audio type."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in AUDIO_EXTENSIONS
|
||||
def is_post_url(url):
    """True when *url* is a string whose path contains a '/post/' segment."""
    if not isinstance(url, str):
        return False
    return '/post/' in urlparse(url).path
|
||||
def extract_post_info(url_string):
    """Parse a Kemono/Coomer URL into (service, user_id, post_id).

    Handles site URLs (/<service>/user/<id>[/post/<pid>]) and API URLs
    (/api/v1/<service>/user/<id>[/post/<pid>]). Any element that cannot be
    determined is returned as None.
    """
    service, user_id, post_id = None, None, None
    if not isinstance(url_string, str) or not url_string.strip():
        return None, None, None
    try:
        parsed = urlparse(url_string.strip())
        host = parsed.netloc.lower()
        known_hosts = ('kemono.su', 'kemono.party', 'coomer.su', 'coomer.party')
        if not any(d in host for d in known_hosts):
            return None, None, None
        segments = [seg for seg in parsed.path.strip('/').split('/') if seg]
        # Site URL: /<service>/user/<user_id>[/post/<post_id>]
        if len(segments) >= 3 and segments[1].lower() == 'user':
            service, user_id = segments[0], segments[2]
            if len(segments) >= 5 and segments[3].lower() == 'post':
                post_id = segments[4]
            return service, user_id, post_id
        # API URL: /api/v1/<service>/user/<user_id>[/post/<post_id>]
        if (len(segments) >= 5 and segments[0].lower() == 'api'
                and segments[1].lower() == 'v1' and segments[3].lower() == 'user'):
            service, user_id = segments[2], segments[4]
            if len(segments) >= 7 and segments[5].lower() == 'post':
                post_id = segments[6]
            return service, user_id, post_id
    except Exception as e:
        print(f"Debug: Exception during extract_post_info for URL '{url_string}': {e}")
    return None, None, None
|
||||
def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_cookie_file_path_from_ui, app_base_dir, logger_func, target_domain=None):
    """Prepares a cookie dictionary from text input or cookies.txt file.

    Sources are tried in order: the UI-selected file (when relevant to the
    target domain), '<domain>_cookies.txt' in the app directory, the generic
    'cookies.txt' in the app directory, and finally the raw cookie text from
    the UI. Returns the first non-empty cookie dict found, else None.
    """
    if not use_cookie_flag:
        return None

    tried_files = set()

    # 1) The cookie file explicitly chosen in the UI, if relevant to the domain.
    if selected_cookie_file_path_from_ui:
        selected_basename = os.path.basename(selected_cookie_file_path_from_ui)
        if target_domain:
            selection_is_relevant = selected_basename in (f"{target_domain}_cookies.txt", "cookies.txt")
        else:
            selection_is_relevant = True

        if selection_is_relevant:
            logger_func(f"  🍪 Attempting to load cookies from UI-selected file: '{selected_basename}' for domain '{target_domain or 'any'}'...")
            tried_files.add(os.path.normpath(selected_cookie_file_path_from_ui))
            cookies = load_cookies_from_netscape_file(selected_cookie_file_path_from_ui, logger_func, target_domain_filter=target_domain)
            if cookies:
                return cookies
            logger_func(f"  ⚠️ Failed to load cookies from UI-selected file: '{selected_basename}'.")
        else:
            logger_func(f"  ℹ️ UI-selected cookie file '{selected_basename}' is not specific to target domain '{target_domain}' or generic. Skipping it for this request, will try other sources.")

    # 2) '<domain>_cookies.txt' in the application directory.
    if app_base_dir and target_domain:
        domain_filename = f"{target_domain}_cookies.txt"
        domain_path = os.path.join(app_base_dir, domain_filename)
        if os.path.exists(domain_path) and os.path.normpath(domain_path) not in tried_files:
            logger_func(f"  🍪 Attempting to load domain-specific cookies: '{domain_filename}' for '{target_domain}' from app directory...")
            tried_files.add(os.path.normpath(domain_path))
            cookies = load_cookies_from_netscape_file(domain_path, logger_func, target_domain_filter=target_domain)
            if cookies:
                return cookies
            logger_func(f"  ⚠️ Failed to load cookies from '{domain_filename}' in app directory.")

    # 3) Generic 'cookies.txt' in the application directory.
    if app_base_dir:
        default_filename = "cookies.txt"
        default_path = os.path.join(app_base_dir, default_filename)
        if os.path.exists(default_path) and os.path.normpath(default_path) not in tried_files:
            logger_func(f"  🍪 Attempting to load default '{default_filename}' from app directory for domain '{target_domain or 'any'}'...")
            tried_files.add(os.path.normpath(default_path))
            cookies = load_cookies_from_netscape_file(default_path, logger_func, target_domain_filter=target_domain)
            if cookies:
                return cookies
            logger_func(f"  ⚠️ Failed to load cookies from default '{default_filename}' in app directory.")

    # 4) Raw 'name=value; ...' text typed into the UI.
    if cookie_text_input:
        logger_func(f"  🍪 Using cookies from UI text input for domain '{target_domain or 'any'}' (as file methods failed or were not applicable).")
        cookies = parse_cookie_string(cookie_text_input)
        if cookies:
            return cookies
        logger_func("  ⚠️ UI cookie text input was provided but was empty or invalid.")

    logger_func(f"  🍪 Cookie usage enabled for domain '{target_domain or 'any'}', but no valid cookies found from any source.")
    return None
|
||||
def fetch_posts_paginated (api_url_base ,headers ,offset ,logger ,cancellation_event =None ,pause_event =None ,cookies_dict =None ):
    """Fetch one page of posts from the creator API at the given offset.

    Args:
        api_url_base: Base creator API URL; ``?o=<offset>`` is appended.
        headers: HTTP headers dict passed to ``requests.get``.
        offset: Pagination offset (API pages are assumed to be 50 posts wide
            for the "Page approx." log line).
        logger: Callable taking a single string, used for progress logging.
        cancellation_event: Optional threading.Event; when set, aborts by
            raising RuntimeError.
        pause_event: Optional threading.Event; while set, the function
            busy-waits (0.5s polls) before issuing the request.
        cookies_dict: Optional cookies dict forwarded to ``requests.get``.

    Returns:
        The decoded JSON body (expected to be a list of posts), or ``[]``
        when the server responds with a non-JSON content type.

    Raises:
        RuntimeError: On user cancellation, on exhausted retries, on HTTP /
            JSON-decoding errors, or on any unexpected failure.
    """
    # Honor cancellation before doing any network work.
    if cancellation_event and cancellation_event .is_set ():
        logger (" Fetch cancelled before request.")
        raise RuntimeError ("Fetch operation cancelled by user.")
    # Block while paused, still polling for cancellation every 0.5s.
    if pause_event and pause_event .is_set ():
        logger (" Post fetching paused...")
        while pause_event .is_set ():
            if cancellation_event and cancellation_event .is_set ():
                logger (" Post fetching cancelled while paused.")
                raise RuntimeError ("Fetch operation cancelled by user.")
            time .sleep (0.5 )
        logger (" Post fetching resumed.")
    paginated_url =f'{api_url_base }?o={offset }'
    max_retries =3   # total attempts = max_retries + 1
    retry_delay =5   # base delay (seconds); doubled each retry (exponential backoff)

    for attempt in range (max_retries +1 ):
        if cancellation_event and cancellation_event .is_set ():
            raise RuntimeError ("Fetch operation cancelled by user during retry loop.")

        log_message =f" Fetching: {paginated_url } (Page approx. {offset //50 +1 })"
        if attempt >0 :
            log_message +=f" (Attempt {attempt +1 }/{max_retries +1 })"
        logger (log_message )

        try :
            # (connect, read) timeouts: 15s to connect, 90s to read the page.
            response =requests .get (paginated_url ,headers =headers ,timeout =(15 ,90 ),cookies =cookies_dict )
            response .raise_for_status ()

            # Non-JSON bodies (e.g. HTML error pages) are treated as "no posts".
            if 'application/json'not in response .headers .get ('Content-Type','').lower ():
                logger (f"⚠️ Unexpected content type from API: {response .headers .get ('Content-Type')}. Body: {response .text [:200 ]}")
                return []

            return response .json ()

        except (requests .exceptions .Timeout ,requests .exceptions .ConnectionError )as e :
            # Transient network problems: retry with exponential backoff.
            logger (f" ⚠️ Retryable network error on page fetch (Attempt {attempt +1 }): {e }")
            if attempt <max_retries :
                delay =retry_delay *(2 **attempt )
                logger (f" Retrying in {delay } seconds...")
                # Sleep in 0.1s slices so cancellation is noticed promptly.
                sleep_start =time .time ()
                while time .time ()-sleep_start <delay :
                    if cancellation_event and cancellation_event .is_set ():
                        raise RuntimeError ("Fetch operation cancelled by user during retry delay.")
                    time .sleep (0.1 )
                continue
            else :
                logger (f" ❌ Failed to fetch page after {max_retries +1 } attempts.")
                raise RuntimeError (f"Timeout or connection error fetching offset {offset } from {paginated_url }")

        except requests .exceptions .RequestException as e :
            # Non-retryable HTTP errors (4xx/5xx etc.) — fail immediately.
            err_msg =f"Error fetching offset {offset } from {paginated_url }: {e }"
            if e .response is not None :
                err_msg +=f" (Status: {e .response .status_code }, Body: {e .response .text [:200 ]})"
            if isinstance (e ,requests .exceptions .ConnectionError )and ("Failed to resolve"in str (e )or "NameResolutionError"in str (e )):
                err_msg +="\n 💡 This looks like a DNS resolution problem. Please check your internet connection, DNS settings, or VPN."
            raise RuntimeError (err_msg )
        except ValueError as e :
            # response.json() raises a ValueError subclass on malformed JSON.
            raise RuntimeError (f"Error decoding JSON from offset {offset } ({paginated_url }): {e }. Response text: {response .text [:200 ]}")
        except Exception as e :
            raise RuntimeError (f"Unexpected error fetching offset {offset } ({paginated_url }): {e }")

    # Defensive: every loop path either returns or raises, so this is
    # normally unreachable.
    raise RuntimeError (f"Failed to fetch page {paginated_url } after all attempts.")
|
||||
def fetch_post_comments (api_domain ,service ,user_id ,post_id ,headers ,logger ,cancellation_event =None ,pause_event =None ,cookies_dict =None ):
    """Fetch the comments for a single post via the ``/comments`` API endpoint.

    Mirrors the retry/pause/cancellation structure of ``fetch_posts_paginated``
    but with a shorter retry budget (2 retries, 3s base delay) and tighter
    timeouts, since comment fetches are secondary to post fetches.

    Args:
        api_domain: Host name (e.g. ``kemono.su``) used to build the URL.
        service: Service segment of the API path (e.g. ``patreon``).
        user_id: Creator identifier.
        post_id: Post identifier whose comments are requested.
        headers: HTTP headers dict passed to ``requests.get``.
        logger: Callable taking a single string, used for progress logging.
        cancellation_event: Optional threading.Event; when set, aborts by
            raising RuntimeError.
        pause_event: Optional threading.Event; while set, the function
            busy-waits (0.5s polls) before issuing the request.
        cookies_dict: Optional cookies dict forwarded to ``requests.get``.

    Returns:
        The decoded JSON body (expected to be a list of comments), or ``[]``
        when the server responds with a non-JSON content type.

    Raises:
        RuntimeError: On user cancellation, on exhausted retries, on HTTP /
            JSON-decoding errors, or on any unexpected failure.
    """
    # Honor cancellation before doing any network work.
    if cancellation_event and cancellation_event .is_set ():
        logger (" Comment fetch cancelled before request.")
        raise RuntimeError ("Comment fetch operation cancelled by user.")
    # Block while paused, still polling for cancellation every 0.5s.
    if pause_event and pause_event .is_set ():
        logger (" Comment fetching paused...")
        while pause_event .is_set ():
            if cancellation_event and cancellation_event .is_set ():
                logger (" Comment fetching cancelled while paused.")
                raise RuntimeError ("Comment fetch operation cancelled by user.")
            time .sleep (0.5 )
        logger (" Comment fetching resumed.")
    comments_api_url =f"https://{api_domain }/api/v1/{service }/user/{user_id }/post/{post_id }/comments"
    max_retries =2   # total attempts = max_retries + 1
    retry_delay =3   # base delay (seconds); doubled each retry

    for attempt in range (max_retries +1 ):
        if cancellation_event and cancellation_event .is_set ():
            raise RuntimeError ("Comment fetch operation cancelled by user during retry loop.")

        log_message =f" Fetching comments: {comments_api_url }"
        if attempt >0 :
            log_message +=f" (Attempt {attempt +1 }/{max_retries +1 })"
        logger (log_message )

        try :
            # (connect, read) timeouts: 10s to connect, 30s to read.
            response =requests .get (comments_api_url ,headers =headers ,timeout =(10 ,30 ),cookies =cookies_dict )
            response .raise_for_status ()

            # Non-JSON bodies are treated as "no comments".
            if 'application/json'not in response .headers .get ('Content-Type','').lower ():
                logger (f"⚠️ Unexpected content type from comments API: {response .headers .get ('Content-Type')}. Body: {response .text [:200 ]}")
                return []

            return response .json ()

        except (requests .exceptions .Timeout ,requests .exceptions .ConnectionError )as e :
            # Transient network problems: retry with exponential backoff.
            logger (f" ⚠️ Retryable network error on comment fetch (Attempt {attempt +1 }): {e }")
            if attempt <max_retries :
                delay =retry_delay *(2 **attempt )
                logger (f" Retrying in {delay } seconds...")
                # Sleep in 0.1s slices so cancellation is noticed promptly.
                sleep_start =time .time ()
                while time .time ()-sleep_start <delay :
                    if cancellation_event and cancellation_event .is_set ():
                        raise RuntimeError ("Comment fetch operation cancelled by user during retry delay.")
                    time .sleep (0.1 )
                continue
            else :
                logger (f" ❌ Failed to fetch comments for post {post_id } after {max_retries +1 } attempts.")
                raise RuntimeError (f"Timeout or connection error fetching comments for post {post_id } from {comments_api_url }")

        except requests .exceptions .RequestException as e :
            # Non-retryable HTTP errors — fail immediately with context.
            err_msg =f"Error fetching comments for post {post_id } from {comments_api_url }: {e }"
            if e .response is not None :
                err_msg +=f" (Status: {e .response .status_code }, Body: {e .response .text [:200 ]})"
            if isinstance (e ,requests .exceptions .ConnectionError )and ("Failed to resolve"in str (e )or "NameResolutionError"in str (e )):
                err_msg +="\n 💡 This looks like a DNS resolution problem. Please check your internet connection, DNS settings, or VPN."
            raise RuntimeError (err_msg )
        except ValueError as e :
            # response.json() raises a ValueError subclass on malformed JSON.
            raise RuntimeError (f"Error decoding JSON from comments API for post {post_id } ({comments_api_url }): {e }. Response text: {response .text [:200 ]}")
        except Exception as e :
            raise RuntimeError (f"Unexpected error fetching comments for post {post_id } ({comments_api_url }): {e }")

    # Defensive: every loop path either returns or raises, so this is
    # normally unreachable.
    raise RuntimeError (f"Failed to fetch comments for post {post_id } after all attempts.")
|
||||
def download_from_api (
api_url_input ,
logger =print ,
start_page =None ,
end_page =None ,
manga_mode =False ,
cancellation_event =None ,
pause_event =None ,
use_cookie =False ,
cookie_text ="",
selected_cookie_file =None ,
app_base_dir =None ,
manga_filename_style_for_sort_check =None
):
    """Generator that yields batches (lists) of post dicts for a creator/post URL.

    Behavior depends on what ``extract_post_info`` finds in ``api_url_input``:

    * Single-post URL: try a direct post fetch first; on success yield that one
      post and stop. Otherwise fall back to scanning pages for the post id.
    * Manga mode (any style except STYLE_DATE_POST_TITLE, no target post):
      fetch ALL pages first, sort oldest-first by 'published'/'added' date,
      then yield in page_size chunks.
    * Otherwise: stream pages in API order (newest first), honoring
      start_page/end_page.

    Args:
        api_url_input: Creator or post URL on kemono/coomer.
        logger: Callable taking a single string for progress output.
        start_page / end_page: Optional 1-based page range for creator feeds
            (ignored when a specific post URL is given).
        manga_mode: Enables the fetch-all-and-sort behavior described above.
        cancellation_event / pause_event: Optional threading.Events checked
            throughout; cancellation stops the generator, pause busy-waits.
        use_cookie / cookie_text / selected_cookie_file / app_base_dir:
            Inputs forwarded to ``prepare_cookies_for_request`` (cookies are
            only prepared when both use_cookie and app_base_dir are truthy).
        manga_filename_style_for_sort_check: Manga filename style constant;
            STYLE_DATE_POST_TITLE keeps the default API (newest-first) order.

    Yields:
        list[dict]: Batches of post dicts (one post per list for a direct hit).
    """
    headers ={
    'User-Agent':'Mozilla/5.0',
    'Accept':'application/json'
    }

    service ,user_id ,target_post_id =extract_post_info (api_url_input )

    if cancellation_event and cancellation_event .is_set ():
        logger (" Download_from_api cancelled at start.")
        return

    # Derive the API host from the input URL; fall back to kemono.su for
    # anything unrecognized.
    parsed_input_url_for_domain =urlparse (api_url_input )
    api_domain =parsed_input_url_for_domain .netloc
    if not any (d in api_domain .lower ()for d in ['kemono.su','kemono.party','coomer.su','coomer.party']):
        logger (f"⚠️ Unrecognized domain '{api_domain }' from input URL. Defaulting to kemono.su for API calls.")
        api_domain ="kemono.su"
    cookies_for_api =None
    if use_cookie and app_base_dir :
        cookies_for_api =prepare_cookies_for_request (use_cookie ,cookie_text ,selected_cookie_file ,app_base_dir ,logger ,target_domain =api_domain )
    if target_post_id :
        # Fast path: fetch the post directly instead of paging through the feed.
        direct_post_api_url =f"https://{api_domain }/api/v1/{service }/user/{user_id }/post/{target_post_id }"
        logger (f" Attempting direct fetch for target post: {direct_post_api_url }")
        try :
            direct_response =requests .get (direct_post_api_url ,headers =headers ,timeout =(10 ,30 ),cookies =cookies_for_api )
            direct_response .raise_for_status ()
            direct_post_data =direct_response .json ()
            # The API may wrap the post in a one-element list and/or a
            # {'post': {...}} envelope — unwrap both shapes.
            if isinstance (direct_post_data ,list )and direct_post_data :
                direct_post_data =direct_post_data [0 ]
            if isinstance (direct_post_data ,dict )and 'post'in direct_post_data and isinstance (direct_post_data ['post'],dict ):
                direct_post_data =direct_post_data ['post']
            if isinstance (direct_post_data ,dict )and direct_post_data .get ('id')==target_post_id :
                logger (f" ✅ Direct fetch successful for post {target_post_id }.")
                yield [direct_post_data ]
                return
            else :
                response_type =type (direct_post_data ).__name__
                response_snippet =str (direct_post_data )[:200 ]
                logger (f" ⚠️ Direct fetch for post {target_post_id } returned unexpected data (Type: {response_type }, Snippet: '{response_snippet }'). Falling back to pagination.")
        except requests .exceptions .RequestException as e :
            logger (f" ⚠️ Direct fetch failed for post {target_post_id }: {e }. Falling back to pagination.")
        except Exception as e :
            logger (f" ⚠️ Unexpected error during direct fetch for post {target_post_id }: {e }. Falling back to pagination.")
    if not service or not user_id :
        logger (f"❌ Invalid URL or could not extract service/user: {api_url_input }")
        return
    if target_post_id and (start_page or end_page ):
        logger ("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")

    # Oldest-first sort applies to every manga style except date+post-title.
    is_manga_mode_fetch_all_and_sort_oldest_first =manga_mode and (manga_filename_style_for_sort_check !=STYLE_DATE_POST_TITLE )and not target_post_id

    api_base_url =f"https://{api_domain }/api/v1/{service }/user/{user_id }"
    page_size =50

    if is_manga_mode_fetch_all_and_sort_oldest_first :
        # --- Manga mode: accumulate every page, then sort and yield chunks ---
        logger (f" Manga Mode (Style: {manga_filename_style_for_sort_check if manga_filename_style_for_sort_check else 'Default'} - Oldest First Sort Active): Fetching all posts to sort by date...")
        all_posts_for_manga_mode =[]
        current_offset_manga =0
        if start_page and start_page >1 :
            current_offset_manga =(start_page -1 )*page_size
            logger (f" Manga Mode: Starting fetch from page {start_page } (offset {current_offset_manga }).")
        elif start_page :
            logger (f" Manga Mode: Starting fetch from page 1 (offset 0).")
        if end_page :
            logger (f" Manga Mode: Will fetch up to page {end_page }.")
        while True :
            # Pause handling: poll every 0.5s, still noticing cancellation.
            if pause_event and pause_event .is_set ():
                logger (" Manga mode post fetching paused...")
                while pause_event .is_set ():
                    if cancellation_event and cancellation_event .is_set ():
                        logger (" Manga mode post fetching cancelled while paused.")
                        break
                    time .sleep (0.5 )
                if not (cancellation_event and cancellation_event .is_set ()):logger (" Manga mode post fetching resumed.")
            if cancellation_event and cancellation_event .is_set ():
                logger (" Manga mode post fetching cancelled.")
                break
            current_page_num_manga =(current_offset_manga //page_size )+1
            if end_page and current_page_num_manga >end_page :
                logger (f" Manga Mode: Reached specified end page ({end_page }). Stopping post fetch.")
                break
            try :
                posts_batch_manga =fetch_posts_paginated (api_base_url ,headers ,current_offset_manga ,logger ,cancellation_event ,pause_event ,cookies_dict =cookies_for_api )
                if not isinstance (posts_batch_manga ,list ):
                    logger (f"❌ API Error (Manga Mode): Expected list of posts, got {type (posts_batch_manga )}.")
                    break
                if not posts_batch_manga :
                    # Empty page == end of the feed; explain if a page range
                    # produced no posts at all.
                    logger ("✅ Reached end of posts (Manga Mode fetch all).")
                    if start_page and not end_page and current_page_num_manga <start_page :
                        logger (f" Manga Mode: No posts found on or after specified start page {start_page }.")
                    elif end_page and current_page_num_manga <=end_page and not all_posts_for_manga_mode :
                        logger (f" Manga Mode: No posts found within the specified page range ({start_page or 1 }-{end_page }).")
                    break
                all_posts_for_manga_mode .extend (posts_batch_manga )
                current_offset_manga +=page_size
                time .sleep (0.6 )  # polite delay between page requests
            except RuntimeError as e :
                # fetch_posts_paginated signals both cancellation and hard
                # failures via RuntimeError; distinguish them by message.
                if "cancelled by user"in str (e ).lower ():
                    logger (f"ℹ️ Manga mode pagination stopped due to cancellation: {e }")
                else :
                    logger (f"❌ {e }\n Aborting manga mode pagination.")
                break
            except Exception as e :
                logger (f"❌ Unexpected error during manga mode fetch: {e }")
                traceback .print_exc ()
                break
        if cancellation_event and cancellation_event .is_set ():return
        if all_posts_for_manga_mode :
            logger (f" Manga Mode: Fetched {len (all_posts_for_manga_mode )} total posts. Sorting by publication date (oldest first)...")
            def sort_key_tuple (post ):
                # Primary key: 'published' date, falling back to 'added', then
                # a sentinel earliest date. Secondary key: numeric post id.
                published_date_str =post .get ('published')
                added_date_str =post .get ('added')
                post_id_str =post .get ('id',"0")
                primary_sort_val ="0000-00-00T00:00:00"
                if published_date_str :
                    primary_sort_val =published_date_str
                elif added_date_str :
                    logger (f" ⚠️ Post ID {post_id_str } missing 'published' date, using 'added' date '{added_date_str }' for primary sorting.")
                    primary_sort_val =added_date_str
                else :
                    logger (f" ⚠️ Post ID {post_id_str } missing both 'published' and 'added' dates. Placing at start of sort (using default earliest date).")
                secondary_sort_val =0
                try :
                    secondary_sort_val =int (post_id_str )
                except ValueError :
                    logger (f" ⚠️ Post ID '{post_id_str }' is not a valid integer for secondary sorting, using 0.")
                return (primary_sort_val ,secondary_sort_val )
            all_posts_for_manga_mode .sort (key =sort_key_tuple )
            # Yield the sorted posts in page_size chunks.
            for i in range (0 ,len (all_posts_for_manga_mode ),page_size ):
                if cancellation_event and cancellation_event .is_set ():
                    logger (" Manga mode post yielding cancelled.")
                    break
                yield all_posts_for_manga_mode [i :i +page_size ]
        return

    # --- Streaming path: default API order (newest first) ---
    if manga_mode and not target_post_id and (manga_filename_style_for_sort_check ==STYLE_DATE_POST_TITLE ):
        logger (f" Manga Mode (Style: {STYLE_DATE_POST_TITLE }): Processing posts in default API order (newest first).")

    current_page_num =1
    current_offset =0
    processed_target_post_flag =False
    if start_page and start_page >1 and not target_post_id :
        current_offset =(start_page -1 )*page_size
        current_page_num =start_page
        logger (f" Starting from page {current_page_num } (calculated offset {current_offset }).")
    while True :
        # Pause handling: poll every 0.5s, still noticing cancellation.
        if pause_event and pause_event .is_set ():
            logger (" Post fetching loop paused...")
            while pause_event .is_set ():
                if cancellation_event and cancellation_event .is_set ():
                    logger (" Post fetching loop cancelled while paused.")
                    break
                time .sleep (0.5 )
            if not (cancellation_event and cancellation_event .is_set ()):logger (" Post fetching loop resumed.")
        if cancellation_event and cancellation_event .is_set ():
            logger (" Post fetching loop cancelled.")
            break
        if target_post_id and processed_target_post_flag :
            break
        if not target_post_id and end_page and current_page_num >end_page :
            logger (f"✅ Reached specified end page ({end_page }) for creator feed. Stopping.")
            break
        try :
            posts_batch =fetch_posts_paginated (api_base_url ,headers ,current_offset ,logger ,cancellation_event ,pause_event ,cookies_dict =cookies_for_api )
            if not isinstance (posts_batch ,list ):
                logger (f"❌ API Error: Expected list of posts, got {type (posts_batch )} at page {current_page_num } (offset {current_offset }).")
                break
        except RuntimeError as e :
            if "cancelled by user"in str (e ).lower ():
                logger (f"ℹ️ Pagination stopped due to cancellation: {e }")
            else :
                logger (f"❌ {e }\n Aborting pagination at page {current_page_num } (offset {current_offset }).")
            break
        except Exception as e :
            logger (f"❌ Unexpected error fetching page {current_page_num } (offset {current_offset }): {e }")
            traceback .print_exc ()
            break
        if not posts_batch :
            # End of feed — message depends on whether we were hunting a
            # specific post or just streaming pages.
            if target_post_id and not processed_target_post_flag :
                logger (f"❌ Target post {target_post_id } not found after checking all available pages (API returned no more posts at offset {current_offset }).")
            elif not target_post_id :
                if current_page_num ==(start_page or 1 ):
                    logger (f"😕 No posts found on the first page checked (page {current_page_num }, offset {current_offset }).")
                else :
                    logger (f"✅ Reached end of posts (no more content from API at offset {current_offset }).")
            break
        if target_post_id and not processed_target_post_flag :
            # Scan this batch for the requested post id (string-compared).
            matching_post =next ((p for p in posts_batch if str (p .get ('id'))==str (target_post_id )),None )
            if matching_post :
                logger (f"🎯 Found target post {target_post_id } on page {current_page_num } (offset {current_offset }).")
                yield [matching_post ]
                processed_target_post_flag =True
        elif not target_post_id :
            yield posts_batch
        if processed_target_post_flag :
            break
        current_offset +=page_size
        current_page_num +=1
        time .sleep (0.6 )  # polite delay between page requests
    if target_post_id and not processed_target_post_flag and not (cancellation_event and cancellation_event .is_set ()):
        logger (f"❌ Target post {target_post_id } could not be found after checking all relevant pages (final check after loop).")
|
||||
def get_link_platform (url ):
    """Classify an external link by its hosting platform.

    Args:
        url (str): Any URL string.

    Returns:
        str: A lowercase platform label (e.g. 'mega', 'google drive',
            'discord invite'), a name guessed from the domain for unknown
            hosts, 'external' when no domain can be parsed, or 'unknown'
            if parsing raises.
    """
    try :
        parsed =urlparse (url )
        domain =parsed .netloc .lower ()
        if 'drive.google.com'in domain :return 'google drive'
        if 'mega.nz'in domain or 'mega.io'in domain :return 'mega'
        if 'dropbox.com'in domain :return 'dropbox'
        if 'patreon.com'in domain :return 'patreon'
        if 'gofile.io'in domain :return 'gofile'
        if 'instagram.com'in domain :return 'instagram'
        if 'twitter.com'in domain or 'x.com'in domain :return 'twitter/x'
        # BUG FIX: the old check `'discord.com/invite' in domain` could never
        # match because `domain` is only the netloc — the '/invite' part lives
        # in the URL path. Check the path separately for discord.com links.
        if 'discord.gg'in domain or ('discord.com'in domain and '/invite'in parsed .path .lower ()):return 'discord invite'
        if 'pixiv.net'in domain :return 'pixiv'
        if 'kemono.su'in domain or 'kemono.party'in domain :return 'kemono'
        if 'coomer.su'in domain or 'coomer.party'in domain :return 'coomer'
        # Fallback: guess a platform name from the registrable part of the
        # domain, skipping common second-level labels like 'com' / 'co'.
        parts =domain .split ('.')
        if len (parts )>=2 :
            if parts [-2 ]not in ['com','org','net','gov','edu','co']or len (parts )==2 :
                return parts [-2 ]
            elif len (parts )>=3 and parts [-3 ]not in ['com','org','net','gov','edu','co']:
                return parts [-3 ]
            else :
                return domain
        return 'external'
    except Exception :return 'unknown'
|
||||
class PostProcessorSignals (QObject ):
|
||||
progress_signal =pyqtSignal (str )
|
||||
file_download_status_signal =pyqtSignal (bool )
|
||||
@ -815,8 +48,8 @@ class PostProcessorSignals (QObject ):
|
||||
file_progress_signal =pyqtSignal (str ,object )
|
||||
file_successfully_downloaded_signal =pyqtSignal (dict )
|
||||
missed_character_post_signal =pyqtSignal (str ,str )
|
||||
class PostProcessorWorker :
|
||||
|
||||
|
||||
class PostProcessorWorker:
|
||||
def __init__ (self ,post_data ,download_root ,known_names ,
|
||||
filter_character_list ,emitter ,
|
||||
unwanted_keywords ,filter_mode ,skip_zip ,skip_rar ,
|
||||
@ -2122,9 +1355,6 @@ class PostProcessorWorker :
|
||||
except Exception as e:
|
||||
self.logger(f"⚠️ Could not update session file for post {post_id}: {e}")
|
||||
|
||||
|
||||
|
||||
|
||||
if not self .extract_links_only and (total_downloaded_this_post >0 or not (
|
||||
(current_character_filters and (
|
||||
(self .char_filter_scope ==CHAR_SCOPE_TITLE and not post_is_candidate_by_title_char_match )or
|
||||
@ -2149,13 +1379,8 @@ class PostProcessorWorker :
|
||||
if self .check_cancel ():self .logger (f" Post {post_id } processing interrupted/cancelled.");
|
||||
else :self .logger (f" Post {post_id } Summary: Downloaded={total_downloaded_this_post }, Skipped Files={total_skipped_this_post }")
|
||||
|
||||
|
||||
if not self .extract_links_only and self .use_post_subfolders and total_downloaded_this_post ==0 :
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
path_to_check_for_emptiness =determined_post_save_path_for_history
|
||||
try :
|
||||
if os .path .isdir (path_to_check_for_emptiness )and not os .listdir (path_to_check_for_emptiness ):
|
||||
@ -2165,6 +1390,7 @@ class PostProcessorWorker :
|
||||
self .logger (f" ⚠️ Could not remove empty post-specific subfolder '{path_to_check_for_emptiness }': {e_rmdir }")
|
||||
|
||||
return total_downloaded_this_post ,total_skipped_this_post ,kept_original_filenames_for_log ,retryable_failures_this_post ,permanent_failures_this_post ,history_data_for_this_post
|
||||
|
||||
class DownloadThread (QThread ):
|
||||
progress_signal =pyqtSignal (str )
|
||||
add_character_prompt_signal =pyqtSignal (str )
|
||||
@ -2322,7 +1548,7 @@ class DownloadThread (QThread ):
|
||||
if self .manga_mode_active and self .manga_filename_style ==STYLE_POST_TITLE_GLOBAL_NUMBERING and not self .extract_links_only and self .manga_global_file_counter_ref is None :
|
||||
self .manga_global_file_counter_ref =[1 ,threading .Lock ()]
|
||||
self .logger (f"ℹ️ [Thread] Manga Title+GlobalNum Mode: Initialized global counter at {self .manga_global_file_counter_ref [0 ]}.")
|
||||
worker_signals_obj =PostProcessorSignals ()
|
||||
worker_signals_obj = PostProcessorSignals ()
|
||||
try :
|
||||
worker_signals_obj .progress_signal .connect (self .progress_signal )
|
||||
worker_signals_obj .file_download_status_signal .connect (self .file_download_status_signal )
|
||||
@ -2423,9 +1649,6 @@ class DownloadThread (QThread ):
|
||||
if not was_process_cancelled and not self .isInterruptionRequested ():
|
||||
self .logger ("✅ All posts processed or end of content reached by DownloadThread.")
|
||||
|
||||
|
||||
|
||||
|
||||
except Exception as main_thread_err :
|
||||
self .logger (f"\n❌ Critical error within DownloadThread run loop: {main_thread_err }")
|
||||
traceback .print_exc ()
|
||||
@ -2448,85 +1671,6 @@ class DownloadThread (QThread ):
|
||||
self ._add_character_response =result
|
||||
self .logger (f" (DownloadThread) Received character prompt response: {'Yes (added/confirmed)'if result else 'No (declined/failed)'}")
|
||||
|
||||
def download_mega_file (mega_link ,download_path =".",logger_func =print ):
    """
    Downloads a file from a public Mega.nz link.

    Args:
        mega_link (str): The public Mega.nz link to the file.
        download_path (str, optional): The directory to save the downloaded file.
                                       Defaults to the current directory.
        logger_func (callable, optional): Function to use for logging. Defaults to print.

    Raises:
        ImportError: If the mega.py library (Mega class) is unavailable.
        ValueError: If the link is invalid, inaccessible, or lacks a filename.
        PermissionError / FileNotFoundError: On filesystem problems with download_path.
        Exception: Propagated from the underlying mega.py download call.
    """
    logger_func ("Initializing Mega client...")
    try :
        mega_client =Mega ()
    except NameError :
        # Mega was never imported successfully at module load time.
        logger_func ("ERROR: Mega class not available. mega.py library might not be installed correctly.")
        raise ImportError ("Mega class not found. Is mega.py installed?")

    # Anonymous login is sufficient for public links.
    m =mega_client .login ()

    logger_func (f"Attempting to download from: {mega_link }")

    try :
        # Pre-flight: verify the link resolves to a single file and grab its
        # advertised name/size before committing to the download.
        logger_func (f" Verifying Mega link and fetching attributes: {mega_link }")
        file_attributes =m .get_public_url_info (mega_link )

        if not file_attributes or not isinstance (file_attributes ,dict ):
            logger_func (f"❌ Error: Could not retrieve valid file information for the Mega link. Link might be invalid, expired, or a folder. Info received: {file_attributes }")
            raise ValueError (f"Invalid or inaccessible Mega link. get_public_url_info returned: {file_attributes }")

        expected_filename =file_attributes .get ('name')
        file_size =file_attributes .get ('size')

        if not expected_filename :
            logger_func (f"⚠️ Critical: File name ('name') not found in Mega link attributes. Attributes: {file_attributes }")
            raise ValueError (f"File name ('name') not found in Mega link attributes: {file_attributes }")

        logger_func (f" Link verified. Expected filename: '{expected_filename }'. Size: {file_size if file_size is not None else 'Unknown'} bytes.")

        if not os .path .exists (download_path ):
            logger_func (f"Download path '{download_path }' does not exist. Creating it...")
            os .makedirs (download_path ,exist_ok =True )

        logger_func (f"Starting download of '{expected_filename }' to '{download_path }'...")

        download_result =m .download_url (mega_link ,dest_path =download_path ,dest_filename =None )

        if download_result and isinstance (download_result ,tuple )and len (download_result )==2 :
            saved_filepath ,saved_filename =download_result

            # BUG FIX: this branch previously referenced the undefined name
            # `dest_path` (NameError at runtime); the parameter is `download_path`.
            if not os .path .isabs (saved_filepath )and download_path :
                saved_filepath =os .path .join (os .path .abspath (download_path ),saved_filename )

            logger_func (f"File downloaded successfully! Saved as: {saved_filepath }")
            if not os .path .exists (saved_filepath ):
                logger_func (f"⚠️ Warning: mega.py reported success but file '{saved_filepath }' not found on disk.")

            if saved_filename !=expected_filename :
                logger_func (f" Note: Saved filename '{saved_filename }' differs from initially expected '{expected_filename }'. This is usually fine.")
        else :
            logger_func (f"Download failed. The download_url method returned: {download_result }")
            raise Exception (f"Mega download_url did not return expected result or failed. Result: {download_result }")

    except PermissionError :
        logger_func (f"Error: Permission denied to write to '{download_path }'. Please check permissions.")
        raise
    except FileNotFoundError :
        logger_func (f"Error: The specified download path '{download_path }' is invalid or a component was not found.")
        raise
    except requests .exceptions .RequestException as e :
        logger_func (f"Error during request to Mega (network issue, etc.): {e }")
        raise
    except ValueError as ve :
        logger_func (f"ValueError during Mega processing (likely invalid link): {ve }")
        raise
    except Exception as e :
        # Known mega.py failure mode observed for some links despite the
        # pre-flight checks above; flag it specifically before re-raising.
        if isinstance (e ,TypeError )and "'bool' object is not subscriptable"in str (e ):
            logger_func (" This specific TypeError occurred despite pre-flight checks. This might indicate a deeper issue with the mega.py library or a very transient API problem for this link.")
        traceback .print_exc ()
        raise
|
||||
# NOTE(review): this shadows the builtin InterruptedError (an OSError
# subclass). Code catching the builtin will NOT catch this class and vice
# versa — consider renaming (e.g. DownloadInterruptedError) in a follow-up.
class InterruptedError(Exception):
    """Custom exception for handling cancellations gracefully."""
    pass
|
||||
1
src/i18n/__init__.py
Normal file
1
src/i18n/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
File diff suppressed because one or more lines are too long
1
src/services/__init__.py
Normal file
1
src/services/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
163
src/services/drive_downloader.py
Normal file
163
src/services/drive_downloader.py
Normal file
@ -0,0 +1,163 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import re
|
||||
import traceback
|
||||
import json
|
||||
from urllib.parse import urlparse, urlunparse, parse_qs, urlencode
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
import requests
|
||||
try:
|
||||
from mega import Mega
|
||||
MEGA_AVAILABLE = True
|
||||
except ImportError:
|
||||
MEGA_AVAILABLE = False
|
||||
|
||||
try:
|
||||
import gdown
|
||||
GDOWN_AVAILABLE = True
|
||||
except ImportError:
|
||||
GDOWN_AVAILABLE = False
|
||||
|
||||
# --- Helper Functions ---
|
||||
|
||||
def _get_filename_from_headers(headers):
|
||||
"""
|
||||
Extracts a filename from the Content-Disposition header.
|
||||
|
||||
Args:
|
||||
headers (dict): A dictionary of HTTP response headers.
|
||||
|
||||
Returns:
|
||||
str or None: The extracted filename, or None if not found.
|
||||
"""
|
||||
cd = headers.get('content-disposition')
|
||||
if not cd:
|
||||
return None
|
||||
|
||||
fname_match = re.findall('filename="?([^"]+)"?', cd)
|
||||
if fname_match:
|
||||
# Sanitize the filename to prevent directory traversal issues
|
||||
# and remove invalid characters for most filesystems.
|
||||
sanitized_name = re.sub(r'[<>:"/\\|?*]', '_', fname_match[0].strip())
|
||||
return sanitized_name
|
||||
|
||||
return None
|
||||
|
||||
# --- Main Service Downloader Functions ---
|
||||
|
||||
def download_mega_file(mega_link, download_path=".", logger_func=print):
    """
    Downloads a file from a public Mega.nz link.

    Args:
        mega_link (str): The public Mega.nz link to the file.
        download_path (str): The directory to save the downloaded file.
        logger_func (callable): Function to use for logging.

    Raises:
        ImportError: If the optional mega.py dependency is not installed.
        Exception: Any failure during login/download is logged and re-raised.
    """
    if not MEGA_AVAILABLE:
        logger_func("❌ Error: mega.py library is not installed. Cannot download from Mega.")
        logger_func("   Please install it: pip install mega.py")
        raise ImportError("mega.py library not found.")

    logger_func(f"   [Mega] Initializing Mega client...")
    try:
        # Anonymous session is sufficient for public links.
        session = Mega().login()
        logger_func(f"   [Mega] Attempting to download from: {mega_link}")

        if not os.path.exists(download_path):
            os.makedirs(download_path, exist_ok=True)
            logger_func(f"   [Mega] Created download directory: {download_path}")

        # download_url resolves the link, fetches metadata and writes the file.
        result_path = session.download_url(mega_link, dest_path=download_path)

        if not (result_path and os.path.exists(result_path)):
            raise Exception(f"Mega download failed or file not found. Returned: {result_path}")
        logger_func(f"   [Mega] ✅ File downloaded successfully! Saved as: {result_path}")

    except Exception as e:
        logger_func(f"   [Mega] ❌ An unexpected error occurred during Mega download: {e}")
        traceback.print_exc(limit=2)
        raise  # Re-raise the exception to be handled by the calling worker
|
||||
|
||||
def download_gdrive_file(gdrive_link, download_path=".", logger_func=print):
    """
    Downloads a file from a public Google Drive link using the gdown library.

    Args:
        gdrive_link (str): The public Google Drive link to the file.
        download_path (str): The directory to save the downloaded file.
        logger_func (callable): Function to use for logging.

    Raises:
        ImportError: If the gdown library is not installed.
        Exception: If the download fails or the downloaded file is missing.
    """
    if not GDOWN_AVAILABLE:
        logger_func("❌ Error: gdown library is not installed. Cannot download from Google Drive.")
        logger_func("   Please install it: pip install gdown")
        raise ImportError("gdown library not found.")

    logger_func(f"   [GDrive] Attempting to download: {gdrive_link}")
    try:
        if not os.path.exists(download_path):
            os.makedirs(download_path, exist_ok=True)
            logger_func(f"   [GDrive] Created download directory: {download_path}")

        # Bug fix: gdown only treats 'output' as a *directory* when the path
        # ends with the OS path separator; otherwise it tries to write a file
        # with that exact name, which fails when the path is an existing
        # directory (e.g. the default "."). Joining with an empty component
        # guarantees the trailing separator.
        output_dir = os.path.join(download_path, '')

        # gdown handles finding the file ID and downloading. 'fuzzy=True' helps with various URL formats.
        output_file_path = gdown.download(gdrive_link, output=output_dir, quiet=False, fuzzy=True)

        if output_file_path and os.path.exists(output_file_path):
            logger_func(f"   [GDrive] ✅ Google Drive file downloaded successfully: {output_file_path}")
        else:
            raise Exception(f"gdown download failed or file not found. Returned: {output_file_path}")

    except Exception as e:
        logger_func(f"   [GDrive] ❌ An error occurred during Google Drive download: {e}")
        traceback.print_exc(limit=2)
        raise
|
||||
|
||||
def download_dropbox_file(dropbox_link, download_path=".", logger_func=print):
    """
    Downloads a file from a public Dropbox link by modifying the URL for direct download.

    Args:
        dropbox_link (str): The public Dropbox link to the file.
        download_path (str): The directory to save the downloaded file.
        logger_func (callable): Function to use for logging.

    Raises:
        requests.exceptions.RequestException: On HTTP errors (re-raised after logging).
        Exception: On any other network or filesystem error (re-raised after logging).
    """
    logger_func(f"   [Dropbox] Attempting to download: {dropbox_link}")

    # Modify the Dropbox URL to force a direct download instead of showing the
    # preview page: Dropbox serves the raw file when the query carries dl=1.
    parsed_url = urlparse(dropbox_link)
    query_params = parse_qs(parsed_url.query)
    query_params['dl'] = ['1']
    new_query = urlencode(query_params, doseq=True)
    direct_download_url = urlunparse(parsed_url._replace(query=new_query))

    logger_func(f"   [Dropbox] Using direct download URL: {direct_download_url}")

    try:
        if not os.path.exists(download_path):
            os.makedirs(download_path, exist_ok=True)
            logger_func(f"   [Dropbox] Created download directory: {download_path}")

        with requests.get(direct_download_url, stream=True, allow_redirects=True, timeout=(10, 300)) as r:
            r.raise_for_status()

            # Determine filename from headers or URL, with a generic fallback.
            filename = _get_filename_from_headers(r.headers) or os.path.basename(parsed_url.path) or "dropbox_file"
            full_save_path = os.path.join(download_path, filename)

            # Bug fix: the previous message logged the literal placeholder
            # "(unknown)" instead of the resolved filename.
            logger_func(f"   [Dropbox] Starting download of '{filename}'...")

            # Write file to disk in chunks to keep memory usage bounded.
            with open(full_save_path, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)

            logger_func(f"   [Dropbox] ✅ Dropbox file downloaded successfully: {full_save_path}")

    except Exception as e:
        logger_func(f"   [Dropbox] ❌ An error occurred during Dropbox download: {e}")
        traceback.print_exc(limit=2)
        raise
|
||||
@ -1,6 +1,6 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import time
|
||||
import requests
|
||||
import hashlib
|
||||
import http.client
|
||||
import traceback
|
||||
@ -8,21 +8,38 @@ import threading
|
||||
import queue
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
import requests
|
||||
|
||||
# --- Module Constants ---
|
||||
CHUNK_DOWNLOAD_RETRY_DELAY = 2
|
||||
MAX_CHUNK_DOWNLOAD_RETRIES = 1
|
||||
DOWNLOAD_CHUNK_SIZE_ITER = 1024 * 256
|
||||
DOWNLOAD_CHUNK_SIZE_ITER = 1024 * 256 # 256 KB per iteration chunk
|
||||
|
||||
# Flag to indicate if this module and its dependencies are available.
|
||||
# This was missing and caused the ImportError.
|
||||
MULTIPART_DOWNLOADER_AVAILABLE = True
|
||||
|
||||
|
||||
def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte, headers,
|
||||
part_num, total_parts, progress_data, cancellation_event, skip_event, pause_event, global_emit_time_ref, cookies_for_chunk,
|
||||
logger_func, emitter=None, api_original_filename=None):
|
||||
def _download_individual_chunk(
|
||||
chunk_url, temp_file_path, start_byte, end_byte, headers,
|
||||
part_num, total_parts, progress_data, cancellation_event,
|
||||
skip_event, pause_event, global_emit_time_ref, cookies_for_chunk,
|
||||
logger_func, emitter=None, api_original_filename=None
|
||||
):
|
||||
"""
|
||||
Downloads a single segment (chunk) of a larger file. This function is
|
||||
intended to be run in a separate thread by a ThreadPoolExecutor.
|
||||
|
||||
It handles retries, pauses, and cancellations for its specific chunk.
|
||||
"""
|
||||
# --- Pre-download checks for control events ---
|
||||
if cancellation_event and cancellation_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Download cancelled before start.")
|
||||
return 0, False
|
||||
if skip_event and skip_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Skip event triggered before start.")
|
||||
return 0, False
|
||||
|
||||
if pause_event and pause_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Download paused before start...")
|
||||
while pause_event.is_set():
|
||||
@ -32,83 +49,66 @@ def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte,
|
||||
time.sleep(0.2)
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Download resumed.")
|
||||
|
||||
# Prepare headers for the specific byte range of this chunk
|
||||
chunk_headers = headers.copy()
|
||||
if end_byte != -1 :
|
||||
if end_byte != -1:
|
||||
chunk_headers['Range'] = f"bytes={start_byte}-{end_byte}"
|
||||
elif start_byte == 0 and end_byte == -1:
|
||||
pass
|
||||
|
||||
|
||||
bytes_this_chunk = 0
|
||||
last_speed_calc_time = time.time()
|
||||
bytes_at_last_speed_calc = 0
|
||||
|
||||
# --- Retry Loop ---
|
||||
for attempt in range(MAX_CHUNK_DOWNLOAD_RETRIES + 1):
|
||||
if cancellation_event and cancellation_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Cancelled during retry loop.")
|
||||
return bytes_this_chunk, False
|
||||
if skip_event and skip_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Skip event during retry loop.")
|
||||
return bytes_this_chunk, False
|
||||
if pause_event and pause_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Paused during retry loop...")
|
||||
while pause_event.is_set():
|
||||
if cancellation_event and cancellation_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Cancelled while paused in retry loop.")
|
||||
return bytes_this_chunk, False
|
||||
time.sleep(0.2)
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Resumed from retry loop pause.")
|
||||
|
||||
try:
|
||||
if attempt > 0:
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Retrying download (Attempt {attempt}/{MAX_CHUNK_DOWNLOAD_RETRIES})...")
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Retrying (Attempt {attempt + 1}/{MAX_CHUNK_DOWNLOAD_RETRIES + 1})...")
|
||||
time.sleep(CHUNK_DOWNLOAD_RETRY_DELAY * (2 ** (attempt - 1)))
|
||||
last_speed_calc_time = time.time()
|
||||
bytes_at_last_speed_calc = bytes_this_chunk
|
||||
log_msg = f" 🚀 [Chunk {part_num + 1}/{total_parts}] Starting download: bytes {start_byte}-{end_byte if end_byte != -1 else 'EOF'}"
|
||||
logger_func(log_msg)
|
||||
|
||||
logger_func(f" 🚀 [Chunk {part_num + 1}/{total_parts}] Starting download: bytes {start_byte}-{end_byte if end_byte != -1 else 'EOF'}")
|
||||
|
||||
response = requests.get(chunk_url, headers=chunk_headers, timeout=(10, 120), stream=True, cookies=cookies_for_chunk)
|
||||
response.raise_for_status()
|
||||
if start_byte == 0 and end_byte == -1 and int(response.headers.get('Content-Length', 0)) == 0:
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Confirmed 0-byte file.")
|
||||
with progress_data['lock']:
|
||||
progress_data['chunks_status'][part_num]['active'] = False
|
||||
progress_data['chunks_status'][part_num]['speed_bps'] = 0
|
||||
return 0, True
|
||||
|
||||
# --- Data Writing Loop ---
|
||||
with open(temp_file_path, 'r+b') as f:
|
||||
f.seek(start_byte)
|
||||
for data_segment in response.iter_content(chunk_size=DOWNLOAD_CHUNK_SIZE_ITER):
|
||||
if cancellation_event and cancellation_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Cancelled during data iteration.")
|
||||
return bytes_this_chunk, False
|
||||
if skip_event and skip_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Skip event during data iteration.")
|
||||
return bytes_this_chunk, False
|
||||
if pause_event and pause_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Paused during data iteration...")
|
||||
# Handle pausing during the download stream
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Paused...")
|
||||
while pause_event.is_set():
|
||||
if cancellation_event and cancellation_event.is_set():
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Cancelled while paused in data iteration.")
|
||||
return bytes_this_chunk, False
|
||||
if cancellation_event and cancellation_event.is_set(): return bytes_this_chunk, False
|
||||
time.sleep(0.2)
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Resumed from data iteration pause.")
|
||||
logger_func(f" [Chunk {part_num + 1}/{total_parts}] Resumed.")
|
||||
|
||||
if data_segment:
|
||||
f.write(data_segment)
|
||||
bytes_this_chunk += len(data_segment)
|
||||
|
||||
# Update shared progress data structure
|
||||
with progress_data['lock']:
|
||||
progress_data['total_downloaded_so_far'] += len(data_segment)
|
||||
progress_data['chunks_status'][part_num]['downloaded'] = bytes_this_chunk
|
||||
progress_data['chunks_status'][part_num]['active'] = True
|
||||
|
||||
|
||||
# Calculate and update speed for this chunk
|
||||
current_time = time.time()
|
||||
time_delta_speed = current_time - last_speed_calc_time
|
||||
if time_delta_speed > 0.5:
|
||||
time_delta = current_time - last_speed_calc_time
|
||||
if time_delta > 0.5:
|
||||
bytes_delta = bytes_this_chunk - bytes_at_last_speed_calc
|
||||
current_speed_bps = (bytes_delta * 8) / time_delta_speed if time_delta_speed > 0 else 0
|
||||
current_speed_bps = (bytes_delta * 8) / time_delta if time_delta > 0 else 0
|
||||
progress_data['chunks_status'][part_num]['speed_bps'] = current_speed_bps
|
||||
last_speed_calc_time = current_time
|
||||
bytes_at_last_speed_calc = bytes_this_chunk
|
||||
bytes_at_last_speed_calc = bytes_this_chunk
|
||||
|
||||
# Emit progress signal to the UI via the queue
|
||||
if emitter and (current_time - global_emit_time_ref[0] > 0.25):
|
||||
global_emit_time_ref[0] = current_time
|
||||
status_list_copy = [dict(s) for s in progress_data['chunks_status']]
|
||||
@ -116,28 +116,19 @@ def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte,
|
||||
emitter.put({'type': 'file_progress', 'payload': (api_original_filename, status_list_copy)})
|
||||
elif hasattr(emitter, 'file_progress_signal'):
|
||||
emitter.file_progress_signal.emit(api_original_filename, status_list_copy)
|
||||
|
||||
# If we reach here, the download for this chunk was successful
|
||||
return bytes_this_chunk, True
|
||||
|
||||
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout, http.client.IncompleteRead) as e:
|
||||
logger_func(f" ❌ [Chunk {part_num + 1}/{total_parts}] Retryable error: {e}")
|
||||
if isinstance(e, requests.exceptions.ConnectionError) and \
|
||||
("Failed to resolve" in str(e) or "NameResolutionError" in str(e)):
|
||||
logger_func(" 💡 This looks like a DNS resolution problem. Please check your internet connection, DNS settings, or VPN.")
|
||||
if attempt == MAX_CHUNK_DOWNLOAD_RETRIES:
|
||||
logger_func(f" ❌ [Chunk {part_num + 1}/{total_parts}] Failed after {MAX_CHUNK_DOWNLOAD_RETRIES} retries.")
|
||||
return bytes_this_chunk, False
|
||||
except requests.exceptions.RequestException as e:
|
||||
logger_func(f" ❌ [Chunk {part_num + 1}/{total_parts}] Non-retryable error: {e}")
|
||||
if ("Failed to resolve" in str(e) or "NameResolutionError" in str(e)):
|
||||
logger_func(" 💡 This looks like a DNS resolution problem. Please check your internet connection, DNS settings, or VPN.")
|
||||
return bytes_this_chunk, False
|
||||
return bytes_this_chunk, False # Break loop on non-retryable errors
|
||||
except Exception as e:
|
||||
logger_func(f" ❌ [Chunk {part_num + 1}/{total_parts}] Unexpected error: {e}\n{traceback.format_exc(limit=1)}")
|
||||
|
||||
return bytes_this_chunk, False
|
||||
with progress_data['lock']:
|
||||
progress_data['chunks_status'][part_num]['active'] = False
|
||||
progress_data['chunks_status'][part_num]['speed_bps'] = 0
|
||||
|
||||
return bytes_this_chunk, False
|
||||
|
||||
|
||||
1
src/ui/__init__.py
Normal file
1
src/ui/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
42
src/ui/assets.py
Normal file
42
src/ui/assets.py
Normal file
@ -0,0 +1,42 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import sys
|
||||
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtGui import QIcon
|
||||
|
||||
# --- Asset Management ---
|
||||
|
||||
# This global variable will cache the icon so we don't have to load it from disk every time.
|
||||
_app_icon_cache = None
|
||||
|
||||
def get_app_icon_object():
    """
    Loads and caches the application icon from the assets folder.
    This function is now centralized to prevent circular imports.

    Returns:
        QIcon: The application icon object (an empty QIcon if the file is missing).
    """
    global _app_icon_cache
    # Serve the cached icon when it was already loaded successfully.
    if _app_icon_cache and not _app_icon_cache.isNull():
        return _app_icon_cache

    # Determine the project's base directory, whether running from source or as a bundled app.
    if getattr(sys, 'frozen', False):
        # The application is frozen (e.g., with PyInstaller); assets sit next
        # to the executable.
        # Bug fix: this branch previously assigned 'base_dir', but the path
        # join below reads 'app_base_dir', raising NameError in frozen builds.
        app_base_dir = os.path.dirname(sys.executable)
    else:
        # The application is running from a .py file.
        # This path navigates up from src/ui/ to the project root.
        app_base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))

    icon_path = os.path.join(app_base_dir, 'assets', 'Kemono.ico')

    if os.path.exists(icon_path):
        _app_icon_cache = QIcon(icon_path)
    else:
        print(f"Warning: Application icon not found at {icon_path}")
        _app_icon_cache = QIcon()  # Return an empty icon as a fallback

    return _app_icon_cache
|
||||
177
src/ui/dialogs/ConfirmAddAllDialog.py
Normal file
177
src/ui/dialogs/ConfirmAddAllDialog.py
Normal file
@ -0,0 +1,177 @@
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QListWidget, QListWidgetItem,
|
||||
QPushButton, QVBoxLayout
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# This assumes the new project structure is in place.
|
||||
from ...i18n.translator import get_translation
|
||||
# get_app_icon_object is defined in the main window module in this refactoring plan.
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
# --- Constants for Dialog Choices ---
|
||||
# These were moved from main.py to be self-contained within this module's context.
|
||||
CONFIRM_ADD_ALL_ACCEPTED = 1
|
||||
CONFIRM_ADD_ALL_SKIP_ADDING = 2
|
||||
CONFIRM_ADD_ALL_CANCEL_DOWNLOAD = 3
|
||||
|
||||
|
||||
class ConfirmAddAllDialog(QDialog):
    """
    A dialog to confirm adding multiple new character/series names to Known.txt.

    It appears when the user provides filter names that are not already known,
    allowing them to persist these names for future use.

    After exec_() returns, ``user_choice`` is either a list of selected filter
    objects, CONFIRM_ADD_ALL_SKIP_ADDING, or CONFIRM_ADD_ALL_CANCEL_DOWNLOAD.
    """

    def __init__(self, new_filter_objects_list, parent_app, parent=None):
        """
        Initializes the dialog.

        Args:
            new_filter_objects_list (list): A list of filter objects (dicts) to propose adding.
            parent_app (DownloaderApp): A reference to the main application window for theming and translations.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app
        self.setModal(True)
        self.new_filter_objects_list = new_filter_objects_list
        # Default choice if the dialog is closed without a button press
        self.user_choice = CONFIRM_ADD_ALL_CANCEL_DOWNLOAD

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        # Set window size dynamically; 768 px is the reference screen height
        # the base dimensions were designed against.
        screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
        scale_factor = screen_height / 768.0
        base_min_w, base_min_h = 480, 350
        scaled_min_w = int(base_min_w * scale_factor)
        scaled_min_h = int(base_min_h * scale_factor)
        self.setMinimumSize(scaled_min_w, scaled_min_h)

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        main_layout = QVBoxLayout(self)

        self.info_label = QLabel()
        self.info_label.setWordWrap(True)
        main_layout.addWidget(self.info_label)

        self.names_list_widget = QListWidget()
        self._populate_list()
        main_layout.addWidget(self.names_list_widget)

        # --- Selection Buttons ---
        selection_buttons_layout = QHBoxLayout()
        self.select_all_button = QPushButton()
        self.select_all_button.clicked.connect(self._select_all_items)
        selection_buttons_layout.addWidget(self.select_all_button)

        self.deselect_all_button = QPushButton()
        self.deselect_all_button.clicked.connect(self._deselect_all_items)
        selection_buttons_layout.addWidget(self.deselect_all_button)
        selection_buttons_layout.addStretch()
        main_layout.addLayout(selection_buttons_layout)

        # --- Action Buttons ---
        buttons_layout = QHBoxLayout()
        self.add_selected_button = QPushButton()
        self.add_selected_button.clicked.connect(self._accept_add_selected)
        self.add_selected_button.setDefault(True)
        buttons_layout.addWidget(self.add_selected_button)

        self.skip_adding_button = QPushButton()
        self.skip_adding_button.clicked.connect(self._reject_skip_adding)
        buttons_layout.addWidget(self.skip_adding_button)
        buttons_layout.addStretch()

        self.cancel_download_button = QPushButton()
        self.cancel_download_button.clicked.connect(self._reject_cancel_download)
        buttons_layout.addWidget(self.cancel_download_button)

        main_layout.addLayout(buttons_layout)

    def _populate_list(self):
        """Populates the list widget with the new names to be confirmed."""
        for filter_obj in self.new_filter_objects_list:
            item_text = filter_obj["name"]
            list_item = QListWidgetItem(item_text)
            # Checkable items, all checked by default; the original filter
            # object rides along in UserRole for retrieval on accept.
            list_item.setFlags(list_item.flags() | Qt.ItemIsUserCheckable)
            list_item.setCheckState(Qt.Checked)
            list_item.setData(Qt.UserRole, filter_obj)
            self.names_list_widget.addItem(list_item)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("confirm_add_all_dialog_title", "Confirm Adding New Names"))
        self.info_label.setText(self._tr("confirm_add_all_info_label", "The following new names/groups..."))
        self.select_all_button.setText(self._tr("confirm_add_all_select_all_button", "Select All"))
        self.deselect_all_button.setText(self._tr("confirm_add_all_deselect_all_button", "Deselect All"))
        self.add_selected_button.setText(self._tr("confirm_add_all_add_selected_button", "Add Selected to Known.txt"))
        self.skip_adding_button.setText(self._tr("confirm_add_all_skip_adding_button", "Skip Adding These"))
        self.cancel_download_button.setText(self._tr("confirm_add_all_cancel_download_button", "Cancel Download"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())

    def _select_all_items(self):
        """Checks all items in the list."""
        for i in range(self.names_list_widget.count()):
            self.names_list_widget.item(i).setCheckState(Qt.Checked)

    def _deselect_all_items(self):
        """Unchecks all items in the list."""
        for i in range(self.names_list_widget.count()):
            self.names_list_widget.item(i).setCheckState(Qt.Unchecked)

    def _accept_add_selected(self):
        """Sets the user choice to the list of selected items and accepts the dialog."""
        selected_objects = []
        for i in range(self.names_list_widget.count()):
            item = self.names_list_widget.item(i)
            if item.checkState() == Qt.Checked:
                filter_obj = item.data(Qt.UserRole)
                if filter_obj:
                    selected_objects.append(filter_obj)

        self.user_choice = selected_objects
        self.accept()

    def _reject_skip_adding(self):
        """Sets the user choice to skip adding and rejects the dialog."""
        self.user_choice = CONFIRM_ADD_ALL_SKIP_ADDING
        self.reject()

    def _reject_cancel_download(self):
        """Sets the user choice to cancel the entire download and rejects the dialog."""
        self.user_choice = CONFIRM_ADD_ALL_CANCEL_DOWNLOAD
        self.reject()

    def exec_(self):
        """
        Overrides the default exec_ to handle the return value logic, ensuring a
        sensible default if no items are selected but the "Add" button is clicked.

        Returns:
            list | int: The selected filter objects, or one of the
            CONFIRM_ADD_ALL_* constants.
        """
        super().exec_()
        # If the user clicked "Add Selected" but didn't select any items, treat it as skipping.
        if isinstance(self.user_choice, list) and not self.user_choice:
            return CONFIRM_ADD_ALL_SKIP_ADDING
        return self.user_choice
|
||||
135
src/ui/dialogs/CookieHelpDialog.py
Normal file
135
src/ui/dialogs/CookieHelpDialog.py
Normal file
@ -0,0 +1,135 @@
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtGui import QIcon
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class CookieHelpDialog(QDialog):
    """
    A dialog to explain how to get a cookies.txt file.

    It can be displayed as a simple informational popup or as a modal choice
    when cookies are required but not found. After the dialog closes,
    ``user_choice`` holds one of the CHOICE_* constants (or None if closed
    without a button press).
    """
    # Constants to define the user's choice from the dialog
    CHOICE_PROCEED_WITHOUT_COOKIES = 1
    CHOICE_CANCEL_DOWNLOAD = 2
    CHOICE_OK_INFO_ONLY = 3

    def __init__(self, parent_app, parent=None, offer_download_without_option=False):
        """
        Initializes the dialog.

        Args:
            parent_app (DownloaderApp): A reference to the main application window.
            parent (QWidget, optional): The parent widget. Defaults to None.
            offer_download_without_option (bool): If True, shows buttons to
                "Download without Cookies" and "Cancel Download". If False,
                shows only an "OK" button for informational purposes.
        """
        super().__init__(parent)
        self.parent_app = parent_app
        self.setModal(True)
        self.offer_download_without_option = offer_download_without_option
        # Set by the button handlers below; None until the user decides.
        self.user_choice = None

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        self.setMinimumWidth(500)

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        main_layout = QVBoxLayout(self)

        self.info_label = QLabel()
        # Rich text with clickable external links for the extension how-to.
        self.info_label.setTextFormat(Qt.RichText)
        self.info_label.setOpenExternalLinks(True)
        self.info_label.setWordWrap(True)
        main_layout.addWidget(self.info_label)

        button_layout = QHBoxLayout()
        button_layout.addStretch(1)

        if self.offer_download_without_option:
            # Add buttons for making a choice
            self.download_without_button = QPushButton()
            self.download_without_button.clicked.connect(self._proceed_without_cookies)
            button_layout.addWidget(self.download_without_button)

            self.cancel_button = QPushButton()
            self.cancel_button.clicked.connect(self._cancel_download)
            button_layout.addWidget(self.cancel_button)
        else:
            # Add a simple OK button for informational display
            self.ok_button = QPushButton()
            self.ok_button.clicked.connect(self._ok_info_only)
            button_layout.addWidget(self.ok_button)

        main_layout.addLayout(button_layout)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("cookie_help_dialog_title", "Cookie File Instructions"))

        # Assemble the instruction body from individually translatable HTML fragments.
        instruction_html = f"""
        {self._tr("cookie_help_instruction_intro", "<p>To use cookies...</p>")}
        {self._tr("cookie_help_how_to_get_title", "<p><b>How to get cookies.txt:</b></p>")}
        <ol>
        {self._tr("cookie_help_step1_extension_intro", "<li>Install extension...</li>")}
        {self._tr("cookie_help_step2_login", "<li>Go to website...</li>")}
        {self._tr("cookie_help_step3_click_icon", "<li>Click icon...</li>")}
        {self._tr("cookie_help_step4_export", "<li>Click export...</li>")}
        {self._tr("cookie_help_step5_save_file", "<li>Save file...</li>")}
        {self._tr("cookie_help_step6_app_intro", "<li>In this application:<ul>")}
        {self._tr("cookie_help_step6a_checkbox", "<li>Ensure checkbox...</li>")}
        {self._tr("cookie_help_step6b_browse", "<li>Click browse...</li>")}
        {self._tr("cookie_help_step6c_select", "<li>Select file...</li></ul></li>")}
        </ol>
        {self._tr("cookie_help_alternative_paste", "<p>Alternatively, paste...</p>")}
        """
        self.info_label.setText(instruction_html)

        if self.offer_download_without_option:
            self.download_without_button.setText(self._tr("cookie_help_proceed_without_button", "Download without Cookies"))
            self.cancel_button.setText(self._tr("cookie_help_cancel_download_button", "Cancel Download"))
        else:
            self.ok_button.setText(self._tr("ok_button", "OK"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())

    def _proceed_without_cookies(self):
        """Handles the user choice to proceed without using cookies."""
        self.user_choice = self.CHOICE_PROCEED_WITHOUT_COOKIES
        self.accept()

    def _cancel_download(self):
        """Handles the user choice to cancel the download."""
        self.user_choice = self.CHOICE_CANCEL_DOWNLOAD
        self.reject()

    def _ok_info_only(self):
        """Handles the acknowledgment when the dialog is purely informational."""
        self.user_choice = self.CHOICE_OK_INFO_ONLY
        self.accept()
|
||||
183
src/ui/dialogs/DownloadExtractedLinksDialog.py
Normal file
183
src/ui/dialogs/DownloadExtractedLinksDialog.py
Normal file
@ -0,0 +1,183 @@
|
||||
# --- Standard Library Imports ---
|
||||
from collections import defaultdict
|
||||
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import pyqtSignal, Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QListWidget, QListWidgetItem,
|
||||
QMessageBox, QPushButton, QVBoxLayout, QAbstractItemView
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# This assumes the new project structure is in place.
|
||||
from ...i18n.translator import get_translation
|
||||
# get_app_icon_object is defined in the main window module in this refactoring plan.
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class DownloadExtractedLinksDialog(QDialog):
    """
    A dialog to select and initiate the download for extracted, supported links
    from external cloud services like Mega, Google Drive, and Dropbox.
    """

    # Signal emitted with a list of selected link information dictionaries
    download_requested = pyqtSignal(list)

    def __init__(self, links_data, parent_app, parent=None):
        """
        Initializes the dialog.

        Args:
            links_data (list): A list of dictionaries, each containing info about an extracted link.
            parent_app (DownloaderApp): A reference to the main application window for theming and translations.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.links_data = links_data
        self.parent_app = parent_app

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if not app_icon.isNull():
            self.setWindowIcon(app_icon)

        # Set window size dynamically based on the parent window's size
        if parent:
            parent_width = parent.width()
            parent_height = parent.height()
            # Use a scaling factor for different screen resolutions
            screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
            scale_factor = screen_height / 768.0

            base_min_w, base_min_h = 500, 400
            scaled_min_w = int(base_min_w * scale_factor)
            scaled_min_h = int(base_min_h * scale_factor)

            self.setMinimumSize(scaled_min_w, scaled_min_h)
            self.resize(max(int(parent_width * 0.6 * scale_factor), scaled_min_w),
                        max(int(parent_height * 0.7 * scale_factor), scaled_min_h))

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        layout = QVBoxLayout(self)

        self.main_info_label = QLabel()
        self.main_info_label.setAlignment(Qt.AlignHCenter | Qt.AlignTop)
        self.main_info_label.setWordWrap(True)
        layout.addWidget(self.main_info_label)

        self.links_list_widget = QListWidget()
        # Selection is handled via check boxes on the items, not row selection.
        self.links_list_widget.setSelectionMode(QAbstractItemView.NoSelection)
        self._populate_list()
        layout.addWidget(self.links_list_widget)

        # --- Control Buttons ---
        button_layout = QHBoxLayout()
        self.select_all_button = QPushButton()
        self.select_all_button.clicked.connect(lambda: self._set_all_items_checked(Qt.Checked))
        button_layout.addWidget(self.select_all_button)

        self.deselect_all_button = QPushButton()
        self.deselect_all_button.clicked.connect(lambda: self._set_all_items_checked(Qt.Unchecked))
        button_layout.addWidget(self.deselect_all_button)
        button_layout.addStretch()

        self.download_button = QPushButton()
        self.download_button.clicked.connect(self._handle_download_selected)
        self.download_button.setDefault(True)
        button_layout.addWidget(self.download_button)

        self.cancel_button = QPushButton()
        self.cancel_button.clicked.connect(self.reject)
        button_layout.addWidget(self.cancel_button)
        layout.addLayout(button_layout)

    def _populate_list(self):
        """Populates the list widget with the provided links, grouped by post title."""
        grouped_links = defaultdict(list)
        for link_info_item in self.links_data:
            post_title_for_group = link_info_item.get('title', 'Untitled Post')
            grouped_links[post_title_for_group].append(link_info_item)

        # Case-insensitive alphabetical ordering of the post groups.
        sorted_post_titles = sorted(grouped_links.keys(), key=lambda x: x.lower())

        for post_title_key in sorted_post_titles:
            # Add a non-selectable header for each post
            header_item = QListWidgetItem(f"{post_title_key}")
            header_item.setFlags(Qt.NoItemFlags)
            font = header_item.font()
            font.setBold(True)
            font.setPointSize(font.pointSize() + 1)
            header_item.setFont(font)
            self.links_list_widget.addItem(header_item)

            # Add checkable items for each link within that post
            for link_info_data in grouped_links[post_title_key]:
                platform_display = link_info_data.get('platform', 'unknown').upper()
                display_text = f"  [{platform_display}] {link_info_data['link_text']} ({link_info_data['url']})"
                item = QListWidgetItem(display_text)
                # Carry the full link dict so the download handler can emit it.
                item.setData(Qt.UserRole, link_info_data)
                item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
                item.setCheckState(Qt.Checked)
                self.links_list_widget.addItem(item)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on current app language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("download_external_links_dialog_title", "Download Selected External Links"))
        self.main_info_label.setText(self._tr("download_external_links_dialog_main_label", "Found {count} supported link(s)...").format(count=len(self.links_data)))
        self.select_all_button.setText(self._tr("select_all_button_text", "Select All"))
        self.deselect_all_button.setText(self._tr("deselect_all_button_text", "Deselect All"))
        self.download_button.setText(self._tr("download_selected_button_text", "Download Selected"))
        self.cancel_button.setText(self._tr("fav_posts_cancel_button", "Cancel"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        # BUG FIX: this previously tested `self.parent()` (the QWidget parent),
        # so the dark stylesheet was never applied when the dialog was created
        # without a parent widget. Check parent_app, as the sibling dialogs do.
        is_dark_theme = (self.parent_app is not None
                         and hasattr(self.parent_app, 'current_theme')
                         and self.parent_app.current_theme == "dark")

        if is_dark_theme and hasattr(self.parent_app, 'get_dark_theme'):
            self.setStyleSheet(self.parent_app.get_dark_theme())

        # Set header text color based on theme
        header_color = Qt.cyan if is_dark_theme else Qt.blue
        for i in range(self.links_list_widget.count()):
            item = self.links_list_widget.item(i)
            # Headers are not checkable
            if not item.flags() & Qt.ItemIsUserCheckable:
                item.setForeground(header_color)

    def _set_all_items_checked(self, check_state):
        """Sets the checked state for all checkable items in the list."""
        for i in range(self.links_list_widget.count()):
            item = self.links_list_widget.item(i)
            if item.flags() & Qt.ItemIsUserCheckable:
                item.setCheckState(check_state)

    def _handle_download_selected(self):
        """Gathers selected links and emits the download_requested signal."""
        selected_links = []
        for i in range(self.links_list_widget.count()):
            item = self.links_list_widget.item(i)
            if item.flags() & Qt.ItemIsUserCheckable and item.checkState() == Qt.Checked and item.data(Qt.UserRole) is not None:
                selected_links.append(item.data(Qt.UserRole))

        if selected_links:
            self.download_requested.emit(selected_links)
            self.accept()
        else:
            QMessageBox.information(
                self,
                self._tr("no_selection_title", "No Selection"),
                self._tr("no_selection_message_links", "Please select at least one link to download.")
            )
|
||||
219
src/ui/dialogs/DownloadHistoryDialog.py
Normal file
219
src/ui/dialogs/DownloadHistoryDialog.py
Normal file
@ -0,0 +1,219 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import time
|
||||
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt, QStandardPaths, QTimer
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QScrollArea,
|
||||
QPushButton, QVBoxLayout, QSplitter, QWidget, QGroupBox,
|
||||
QFileDialog, QMessageBox
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class DownloadHistoryDialog(QDialog):
    """
    Dialog to display download history, showing the last few downloaded files
    and the first posts processed in the current session. It also allows
    exporting this history to a text file.
    """

    def __init__(self, last_downloaded_entries, first_processed_entries, parent_app, parent=None):
        """
        Initializes the dialog.

        Args:
            last_downloaded_entries (list): A list of dicts for the last few files.
            first_processed_entries (list): A list of dicts for the first few posts.
            parent_app (DownloaderApp): A reference to the main application window.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app
        self.last_3_downloaded_entries = last_downloaded_entries
        self.first_processed_entries = first_processed_entries
        self.setModal(True)

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        # Set window size dynamically
        screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
        scale_factor = screen_height / 1080.0
        base_min_w, base_min_h = 600, 450
        scaled_min_w = int(base_min_w * 1.5 * scale_factor)
        scaled_min_h = int(base_min_h * scale_factor)
        self.setMinimumSize(scaled_min_w, scaled_min_h)
        self.resize(scaled_min_w, scaled_min_h + 100)  # Give it a bit more height

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        dialog_layout = QVBoxLayout(self)
        self.setLayout(dialog_layout)

        self.main_splitter = QSplitter(Qt.Horizontal)
        dialog_layout.addWidget(self.main_splitter)

        # --- Left Pane (Last Downloaded Files) ---
        left_pane_widget = self._create_history_pane(
            self.last_3_downloaded_entries,
            "history_last_downloaded_header", "Last 3 Files Downloaded:",
            self._format_last_downloaded_entry
        )
        self.main_splitter.addWidget(left_pane_widget)

        # --- Right Pane (First Processed Posts) ---
        right_pane_widget = self._create_history_pane(
            self.first_processed_entries,
            "first_files_processed_header", "First {count} Posts Processed This Session:",
            self._format_first_processed_entry,
            count=len(self.first_processed_entries)
        )
        self.main_splitter.addWidget(right_pane_widget)

        # --- Bottom Buttons ---
        bottom_button_layout = QHBoxLayout()
        self.save_history_button = QPushButton()
        self.save_history_button.clicked.connect(self._save_history_to_txt)
        bottom_button_layout.addStretch(1)
        bottom_button_layout.addWidget(self.save_history_button)
        dialog_layout.addLayout(bottom_button_layout)

        # Set splitter sizes after the dialog is shown to ensure correct proportions
        QTimer.singleShot(0, lambda: self.main_splitter.setSizes([self.width() // 2, self.width() // 2]))

    def _create_history_pane(self, entries, header_key, header_default, formatter_func, **kwargs):
        """Creates a generic pane for displaying a list of history entries."""
        pane_widget = QWidget()
        layout = QVBoxLayout(pane_widget)
        header_text = self._tr(header_key, header_default).format(**kwargs)
        header_label = QLabel(header_text)
        header_label.setAlignment(Qt.AlignCenter)
        layout.addWidget(header_label)

        scroll_area = QScrollArea()
        scroll_area.setWidgetResizable(True)
        scroll_content_widget = QWidget()
        scroll_layout = QVBoxLayout(scroll_content_widget)

        if not entries:
            no_history_label = QLabel(self._tr("no_download_history_header", "No History Yet"))
            no_history_label.setAlignment(Qt.AlignCenter)
            scroll_layout.addWidget(no_history_label)
        else:
            for entry in entries:
                group_box, details_label = formatter_func(entry)
                group_layout = QVBoxLayout(group_box)
                group_layout.addWidget(details_label)
                scroll_layout.addWidget(group_box)

        scroll_area.setWidget(scroll_content_widget)
        layout.addWidget(scroll_area)
        return pane_widget

    def _format_last_downloaded_entry(self, entry):
        """Formats a single entry for the 'Last Downloaded Files' pane."""
        group_box = QGroupBox(f"{self._tr('history_file_label', 'File:')} {entry.get('disk_filename', 'N/A')}")
        details_text = (
            f"<b>{self._tr('history_from_post_label', 'From Post:')}</b> {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})<br>"
            f"<b>{self._tr('history_creator_series_label', 'Creator/Series:')}</b> {entry.get('creator_display_name', 'N/A')}<br>"
            f"<b>{self._tr('history_post_uploaded_label', 'Post Uploaded:')}</b> {entry.get('upload_date_str', 'N/A')}<br>"
            f"<b>{self._tr('history_file_downloaded_label', 'File Downloaded:')}</b> {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_timestamp', 0)))}<br>"
            f"<b>{self._tr('history_saved_in_folder_label', 'Saved In Folder:')}</b> {entry.get('download_path', 'N/A')}"
        )
        details_label = QLabel(details_text)
        details_label.setWordWrap(True)
        details_label.setTextFormat(Qt.RichText)
        return group_box, details_label

    def _format_first_processed_entry(self, entry):
        """Formats a single entry for the 'First Processed Posts' pane."""
        group_box = QGroupBox(f"{self._tr('history_post_label', 'Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
        details_text = (
            f"<b>{self._tr('history_creator_label', 'Creator:')}</b> {entry.get('creator_name', 'N/A')}<br>"
            f"<b>{self._tr('history_top_file_label', 'Top File:')}</b> {entry.get('top_file_name', 'N/A')}<br>"
            f"<b>{self._tr('history_num_files_label', 'Num Files in Post:')}</b> {entry.get('num_files', 0)}<br>"
            f"<b>{self._tr('history_post_uploaded_label', 'Post Uploaded:')}</b> {entry.get('upload_date_str', 'N/A')}<br>"
            f"<b>{self._tr('history_processed_on_label', 'Processed On:')}</b> {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_date_timestamp', 0)))}<br>"
            f"<b>{self._tr('history_saved_to_folder_label', 'Saved To Folder:')}</b> {entry.get('download_location', 'N/A')}"
        )
        details_label = QLabel(details_text)
        details_label.setWordWrap(True)
        details_label.setTextFormat(Qt.RichText)
        return group_box, details_label

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("download_history_dialog_title_combined", "Download History"))
        self.save_history_button.setText(self._tr("history_save_button_text", "Save History to .txt"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())

    def _build_history_text_lines(self):
        """Builds the plain-text lines for export, mirroring both history panes.

        Returns:
            list[str]: One string per output line (no trailing newlines).
        """
        lines = []
        if self.last_3_downloaded_entries:
            lines.append(self._tr("history_last_downloaded_header", "Last 3 Files Downloaded:"))
            for entry in self.last_3_downloaded_entries:
                downloaded_at = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_timestamp', 0)))
                lines.append(f"  {self._tr('history_file_label', 'File:')} {entry.get('disk_filename', 'N/A')}")
                lines.append(f"    {self._tr('history_from_post_label', 'From Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
                lines.append(f"    {self._tr('history_creator_series_label', 'Creator/Series:')} {entry.get('creator_display_name', 'N/A')}")
                lines.append(f"    {self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}")
                lines.append(f"    {self._tr('history_file_downloaded_label', 'File Downloaded:')} {downloaded_at}")
                lines.append(f"    {self._tr('history_saved_in_folder_label', 'Saved In Folder:')} {entry.get('download_path', 'N/A')}")
                lines.append("")
        if self.first_processed_entries:
            header = self._tr("first_files_processed_header", "First {count} Posts Processed This Session:").format(count=len(self.first_processed_entries))
            lines.append(header)
            for entry in self.first_processed_entries:
                processed_at = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_date_timestamp', 0)))
                lines.append(f"  {self._tr('history_post_label', 'Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
                lines.append(f"    {self._tr('history_creator_label', 'Creator:')} {entry.get('creator_name', 'N/A')}")
                lines.append(f"    {self._tr('history_top_file_label', 'Top File:')} {entry.get('top_file_name', 'N/A')}")
                lines.append(f"    {self._tr('history_num_files_label', 'Num Files in Post:')} {entry.get('num_files', 0)}")
                lines.append(f"    {self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}")
                lines.append(f"    {self._tr('history_processed_on_label', 'Processed On:')} {processed_at}")
                lines.append(f"    {self._tr('history_saved_to_folder_label', 'Saved To Folder:')} {entry.get('download_location', 'N/A')}")
                lines.append("")
        return lines

    def _save_history_to_txt(self):
        """Saves the displayed history content to a user-selected text file."""
        if not self.last_3_downloaded_entries and not self.first_processed_entries:
            QMessageBox.information(
                self,
                self._tr("no_download_history_header", "No History Yet"),
                self._tr("history_nothing_to_save_message", "There is no history to save.")
            )
            return

        # Suggest saving in the main download directory or Documents as a fallback
        main_download_dir = self.parent_app.dir_input.text().strip()
        default_save_dir = ""
        if main_download_dir and os.path.isdir(main_download_dir):
            default_save_dir = main_download_dir
        else:
            default_save_dir = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation) or self.parent_app.app_base_dir

        default_filepath = os.path.join(default_save_dir, "download_history.txt")

        filepath, _ = QFileDialog.getSaveFileName(
            self,
            self._tr("history_save_dialog_title", "Save Download History"),
            default_filepath,
            "Text Files (*.txt);;All Files (*)"
        )

        if not filepath:
            return

        # BUG FIX: previously this wrote an empty list and reported success,
        # producing an empty file. Build the actual export content.
        history_content = self._build_history_text_lines()

        try:
            with open(filepath, 'w', encoding='utf-8') as f:
                f.write("\n".join(history_content))
            QMessageBox.information(
                self,
                self._tr("history_export_success_title", "History Export Successful"),
                self._tr("history_export_success_message", "Successfully exported to:\n{filepath}").format(filepath=filepath)
            )
        except Exception as e:
            QMessageBox.critical(
                self,
                self._tr("history_export_error_title", "History Export Error"),
                self._tr("history_export_error_message", "Could not export: {error}").format(error=str(e))
            )
|
||||
1000
src/ui/dialogs/EmptyPopupDialog.py
Normal file
1000
src/ui/dialogs/EmptyPopupDialog.py
Normal file
File diff suppressed because it is too large
Load Diff
230
src/ui/dialogs/ErrorFilesDialog.py
Normal file
230
src/ui/dialogs/ErrorFilesDialog.py
Normal file
@ -0,0 +1,230 @@
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import pyqtSignal, Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QListWidget, QListWidgetItem,
|
||||
QMessageBox, QPushButton, QVBoxLayout, QAbstractItemView, QFileDialog
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..assets import get_app_icon_object
|
||||
# Corrected Import: The filename uses PascalCase.
|
||||
from .ExportOptionsDialog import ExportOptionsDialog
|
||||
|
||||
|
||||
class ErrorFilesDialog(QDialog):
    """
    Dialog to display files that were skipped due to errors and
    allows the user to retry downloading them or export the list of URLs.
    """

    # Signal emitted with a list of file info dictionaries to retry
    retry_selected_signal = pyqtSignal(list)

    def __init__(self, error_files_info_list, parent_app, parent=None):
        """
        Initializes the dialog.

        Args:
            error_files_info_list (list): A list of dictionaries, each containing
                                          info about a failed file.
            parent_app (DownloaderApp): A reference to the main application window
                                        for theming and translations.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app
        self.setModal(True)
        self.error_files = error_files_info_list

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        # Set window size dynamically
        screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
        scale_factor = screen_height / 1080.0
        base_min_w, base_min_h = 500, 300
        scaled_min_w = int(base_min_w * scale_factor)
        scaled_min_h = int(base_min_h * scale_factor)
        self.setMinimumSize(scaled_min_w, scaled_min_h)

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        main_layout = QVBoxLayout(self)

        self.info_label = QLabel()
        self.info_label.setWordWrap(True)
        main_layout.addWidget(self.info_label)

        if self.error_files:
            self.files_list_widget = QListWidget()
            self.files_list_widget.setSelectionMode(QAbstractItemView.NoSelection)
            self._populate_list()
            main_layout.addWidget(self.files_list_widget)

        # --- Control Buttons ---
        buttons_layout = QHBoxLayout()
        self.select_all_button = QPushButton()
        self.select_all_button.clicked.connect(self._select_all_items)
        buttons_layout.addWidget(self.select_all_button)

        self.retry_button = QPushButton()
        self.retry_button.clicked.connect(self._handle_retry_selected)
        buttons_layout.addWidget(self.retry_button)

        self.export_button = QPushButton()
        self.export_button.clicked.connect(self._handle_export_errors_to_txt)
        buttons_layout.addWidget(self.export_button)
        buttons_layout.addStretch(1)

        self.ok_button = QPushButton()
        self.ok_button.clicked.connect(self.accept)
        self.ok_button.setDefault(True)
        buttons_layout.addWidget(self.ok_button)
        main_layout.addLayout(buttons_layout)

        # Enable/disable buttons based on whether there are errors
        has_errors = bool(self.error_files)
        self.select_all_button.setEnabled(has_errors)
        self.retry_button.setEnabled(has_errors)
        self.export_button.setEnabled(has_errors)

    def _populate_list(self):
        """Populates the list widget with details of the failed files."""
        for error_info in self.error_files:
            filename = error_info.get('forced_filename_override',
                                      error_info.get('file_info', {}).get('name', 'Unknown Filename'))
            post_title = error_info.get('post_title', 'Unknown Post')
            post_id = error_info.get('original_post_id_for_log', 'N/A')

            # BUG FIX: `filename` was computed but never used; the item text
            # showed a literal placeholder instead of the actual filename.
            item_text = f"File: {filename}\nFrom Post: '{post_title}' (ID: {post_id})"
            list_item = QListWidgetItem(item_text)
            # Carry the full error dict so the retry handler can emit it.
            list_item.setData(Qt.UserRole, error_info)
            list_item.setFlags(list_item.flags() | Qt.ItemIsUserCheckable)
            list_item.setCheckState(Qt.Unchecked)
            self.files_list_widget.addItem(list_item)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("error_files_dialog_title", "Files Skipped Due to Errors"))
        if not self.error_files:
            self.info_label.setText(self._tr("error_files_no_errors_label", "No files were recorded as skipped..."))
        else:
            self.info_label.setText(self._tr("error_files_found_label", "The following {count} file(s)...").format(count=len(self.error_files)))

        self.select_all_button.setText(self._tr("error_files_select_all_button", "Select All"))
        self.retry_button.setText(self._tr("error_files_retry_selected_button", "Retry Selected"))
        self.export_button.setText(self._tr("error_files_export_urls_button", "Export URLs to .txt"))
        self.ok_button.setText(self._tr("ok_button", "OK"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        if self.parent_app and hasattr(self.parent_app, 'current_theme') and self.parent_app.current_theme == "dark":
            if hasattr(self.parent_app, 'get_dark_theme'):
                self.setStyleSheet(self.parent_app.get_dark_theme())

    def _select_all_items(self):
        """Checks all items in the list."""
        if hasattr(self, 'files_list_widget'):
            for i in range(self.files_list_widget.count()):
                self.files_list_widget.item(i).setCheckState(Qt.Checked)

    def _handle_retry_selected(self):
        """Gathers selected files and emits the retry signal."""
        if not hasattr(self, 'files_list_widget'):
            return

        selected_files_for_retry = [
            self.files_list_widget.item(i).data(Qt.UserRole)
            for i in range(self.files_list_widget.count())
            if self.files_list_widget.item(i).checkState() == Qt.Checked
        ]

        if selected_files_for_retry:
            self.retry_selected_signal.emit(selected_files_for_retry)
            self.accept()
        else:
            QMessageBox.information(
                self,
                self._tr("fav_artists_no_selection_title", "No Selection"),
                self._tr("error_files_no_selection_retry_message", "Please select at least one file to retry.")
            )

    def _handle_export_errors_to_txt(self):
        """Exports the URLs of failed files to a text file."""
        if not self.error_files:
            QMessageBox.information(
                self,
                self._tr("error_files_no_errors_export_title", "No Errors"),
                self._tr("error_files_no_errors_export_message", "There are no error file URLs to export.")
            )
            return

        options_dialog = ExportOptionsDialog(parent_app=self.parent_app, parent=self)
        # Idiom fix: direct inequality is clearer than `not x == y`.
        if options_dialog.exec_() != QDialog.Accepted:
            return

        export_option = options_dialog.get_selected_option()

        lines_to_export = []
        for error_item in self.error_files:
            file_info = error_item.get('file_info', {})
            url = file_info.get('url')

            if url:
                if export_option == ExportOptionsDialog.EXPORT_MODE_WITH_DETAILS:
                    original_filename = file_info.get('name', 'Unknown Filename')
                    post_title = error_item.get('post_title', 'Unknown Post')
                    post_id = error_item.get('original_post_id_for_log', 'N/A')
                    details_string = f" [Post: '{post_title}' (ID: {post_id}), File: '{original_filename}']"
                    lines_to_export.append(f"{url}{details_string}")
                else:
                    lines_to_export.append(url)

        if not lines_to_export:
            QMessageBox.information(
                self,
                self._tr("error_files_no_urls_found_export_title", "No URLs Found"),
                self._tr("error_files_no_urls_found_export_message", "Could not extract any URLs...")
            )
            return

        default_filename = "error_file_links.txt"
        filepath, _ = QFileDialog.getSaveFileName(
            self,
            self._tr("error_files_save_dialog_title", "Save Error File URLs"),
            default_filename,
            "Text Files (*.txt);;All Files (*)"
        )

        if filepath:
            try:
                with open(filepath, 'w', encoding='utf-8') as f:
                    for line in lines_to_export:
                        f.write(f"{line}\n")
                QMessageBox.information(
                    self,
                    self._tr("error_files_export_success_title", "Export Successful"),
                    self._tr("error_files_export_success_message", "Successfully exported...").format(
                        count=len(lines_to_export), filepath=filepath
                    )
                )
            except Exception as e:
                QMessageBox.critical(
                    self,
                    self._tr("error_files_export_error_title", "Export Error"),
                    self._tr("error_files_export_error_message", "Could not export...").format(error=str(e))
                )
|
||||
118
src/ui/dialogs/ExportOptionsDialog.py
Normal file
118
src/ui/dialogs/ExportOptionsDialog.py
Normal file
@ -0,0 +1,118 @@
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout,
|
||||
QRadioButton, QButtonGroup
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# This assumes the new project structure is in place.
|
||||
from ...i18n.translator import get_translation
|
||||
# get_app_icon_object is defined in the main window module in this refactoring plan.
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class ExportOptionsDialog(QDialog):
    """
    Dialog to choose the export format for error file links.

    Lets the user pick between exporting bare URLs and exporting URLs
    annotated with post/file details. Read the choice afterwards with
    :meth:`get_selected_option`.
    """

    # Export-mode identifiers (also used as QButtonGroup ids)
    EXPORT_MODE_LINK_ONLY = 1
    EXPORT_MODE_WITH_DETAILS = 2

    def __init__(self, parent_app, parent=None):
        """
        Initializes the dialog.

        Args:
            parent_app (DownloaderApp): A reference to the main application window for theming and translations.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app
        self.setModal(True)
        # Until the user confirms, assume the simplest export format.
        self.selected_option = self.EXPORT_MODE_LINK_ONLY

        self._setup_window()
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _setup_window(self):
        """Applies the window icon and a resolution-scaled minimum width."""
        icon = get_app_icon_object()
        if icon and not icon.isNull():
            self.setWindowIcon(icon)

        primary_screen = QApplication.primaryScreen()
        avail_height = primary_screen.availableGeometry().height() if primary_screen else 768
        self.setMinimumWidth(int(350 * (avail_height / 768.0)))

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        root = QVBoxLayout(self)

        self.description_label = QLabel()
        root.addWidget(self.description_label)

        # Radio buttons share one group; the group id encodes the export mode.
        self.radio_group = QButtonGroup(self)

        self.radio_link_only = QRadioButton()
        self.radio_link_only.setChecked(True)
        self.radio_group.addButton(self.radio_link_only, self.EXPORT_MODE_LINK_ONLY)
        root.addWidget(self.radio_link_only)

        self.radio_with_details = QRadioButton()
        self.radio_group.addButton(self.radio_with_details, self.EXPORT_MODE_WITH_DETAILS)
        root.addWidget(self.radio_with_details)

        # --- Action Buttons ---
        self.export_button = QPushButton()
        self.export_button.clicked.connect(self._handle_export)
        self.export_button.setDefault(True)

        self.cancel_button = QPushButton()
        self.cancel_button.clicked.connect(self.reject)

        actions = QHBoxLayout()
        actions.addStretch(1)
        actions.addWidget(self.export_button)
        actions.addWidget(self.cancel_button)
        root.addLayout(actions)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if not (callable(get_translation) and self.parent_app):
            return default_text
        return get_translation(self.parent_app.current_selected_language, key, default_text)

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("export_options_dialog_title", "Export Options"))
        self.description_label.setText(self._tr("export_options_description_label", "Choose the format for exporting error file links:"))
        self.radio_link_only.setText(self._tr("export_options_radio_link_only", "Link per line (URL only)"))
        self.radio_link_only.setToolTip(self._tr("export_options_radio_link_only_tooltip", "Exports only the direct download URL..."))
        self.radio_with_details.setText(self._tr("export_options_radio_with_details", "Export with details (URL [Post, File info])"))
        self.radio_with_details.setToolTip(self._tr("export_options_radio_with_details_tooltip", "Exports the URL followed by details..."))
        self.export_button.setText(self._tr("export_options_export_button", "Export"))
        self.cancel_button.setText(self._tr("fav_posts_cancel_button", "Cancel"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        app = self.parent_app
        if not app:
            return
        if getattr(app, 'current_theme', None) == "dark" and hasattr(app, 'get_dark_theme'):
            self.setStyleSheet(app.get_dark_theme())

    def _handle_export(self):
        """Sets the selected export option and accepts the dialog."""
        self.selected_option = self.radio_group.checkedId()
        self.accept()

    def get_selected_option(self):
        """Returns the export mode chosen by the user."""
        return self.selected_option
|
||||
288
src/ui/dialogs/FavoriteArtistsDialog.py
Normal file
288
src/ui/dialogs/FavoriteArtistsDialog.py
Normal file
@ -0,0 +1,288 @@
|
||||
# --- Standard Library Imports ---
|
||||
import html
|
||||
import re
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
import requests
|
||||
from PyQt5.QtCore import QCoreApplication, Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QLineEdit, QListWidget,
|
||||
QListWidgetItem, QMessageBox, QPushButton, QVBoxLayout
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
# Corrected Import: Get the icon from the new assets utility module
|
||||
from ..assets import get_app_icon_object
|
||||
from ...utils.network_utils import prepare_cookies_for_request
|
||||
from .CookieHelpDialog import CookieHelpDialog
|
||||
|
||||
|
||||
class FavoriteArtistsDialog(QDialog):
    """Dialog that lists the user's favorite artists from Kemono.su and Coomer.su.

    Favorites are fetched from each site's
    ``/api/v1/account/favorites?type=artist`` endpoint (session cookies are
    required for authentication), de-duplicated, sorted by name, and shown as a
    checkable list. Call :meth:`get_selected_artists` after ``exec_()`` returns
    to retrieve the chosen entries.
    """

    def __init__(self, parent_app, cookies_config):
        """
        Args:
            parent_app: Main application window; supplies translations, the
                dark theme, and the ``log_signal`` used for logging.
            cookies_config (dict): Cookie settings with keys ``use_cookie``,
                ``cookie_text``, ``selected_cookie_file`` and ``app_base_dir``.
        """
        super().__init__(parent_app)
        self.parent_app = parent_app
        self.cookies_config = cookies_config
        self.all_fetched_artists = []
        # Bug fix: initialize the selection up-front so get_selected_artists()
        # no longer raises AttributeError when the dialog is cancelled before
        # _accept_selection_action() ever runs.
        self.selected_artists_data = []

        app_icon = get_app_icon_object()
        if not app_icon.isNull():
            self.setWindowIcon(app_icon)
        self.selected_artist_urls = []  # kept for backward compatibility; not used internally

        self.setModal(True)
        self.setMinimumSize(500, 500)

        self._init_ui()
        self._fetch_favorite_artists()

    def _get_domain_for_service(self, service_name):
        """Map a service platform to the site that hosts its artist pages."""
        service_lower = service_name.lower()
        coomer_primary_services = {'onlyfans', 'fansly', 'manyvids', 'candfans'}
        if service_lower in coomer_primary_services:
            return "coomer.su"
        return "kemono.su"

    def _tr(self, key, default_text=""):
        """Helper to get translation based on current app language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Apply translated strings to every user-visible widget."""
        self.setWindowTitle(self._tr("fav_artists_dialog_title", "Favorite Artists"))
        self.status_label.setText(self._tr("fav_artists_loading_status", "Loading favorite artists..."))
        self.search_input.setPlaceholderText(self._tr("fav_artists_search_placeholder", "Search artists..."))
        self.select_all_button.setText(self._tr("fav_artists_select_all_button", "Select All"))
        self.deselect_all_button.setText(self._tr("fav_artists_deselect_all_button", "Deselect All"))
        self.download_button.setText(self._tr("fav_artists_download_selected_button", "Download Selected"))
        self.cancel_button.setText(self._tr("fav_artists_cancel_button", "Cancel"))

    def _init_ui(self):
        """Build the widget tree; texts are applied afterwards by _retranslate_ui()."""
        main_layout = QVBoxLayout(self)

        self.status_label = QLabel()
        self.status_label.setAlignment(Qt.AlignCenter)
        main_layout.addWidget(self.status_label)

        self.search_input = QLineEdit()
        self.search_input.textChanged.connect(self._filter_artist_list_display)
        main_layout.addWidget(self.search_input)

        self.artist_list_widget = QListWidget()
        self.artist_list_widget.setStyleSheet("""
            QListWidget::item {
                border-bottom: 1px solid #4A4A4A; /* Slightly softer line */
                padding-top: 4px;
                padding-bottom: 4px;
            }""")
        main_layout.addWidget(self.artist_list_widget)
        self.artist_list_widget.setAlternatingRowColors(True)
        # Hidden until a fetch succeeds; see _show_content_elements().
        self.search_input.setVisible(False)
        self.artist_list_widget.setVisible(False)

        combined_buttons_layout = QHBoxLayout()

        self.select_all_button = QPushButton()
        self.select_all_button.clicked.connect(self._select_all_items)
        combined_buttons_layout.addWidget(self.select_all_button)

        self.deselect_all_button = QPushButton()
        self.deselect_all_button.clicked.connect(self._deselect_all_items)
        combined_buttons_layout.addWidget(self.deselect_all_button)

        self.download_button = QPushButton()
        self.download_button.clicked.connect(self._accept_selection_action)
        self.download_button.setEnabled(False)
        self.download_button.setDefault(True)
        combined_buttons_layout.addWidget(self.download_button)

        self.cancel_button = QPushButton()
        self.cancel_button.clicked.connect(self.reject)
        combined_buttons_layout.addWidget(self.cancel_button)

        combined_buttons_layout.addStretch(1)
        main_layout.addLayout(combined_buttons_layout)

        self._retranslate_ui()
        if hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())

    def _logger(self, message):
        """Helper to log messages, either to parent app or console."""
        if hasattr(self.parent_app, 'log_signal') and self.parent_app.log_signal:
            self.parent_app.log_signal.emit(f"[FavArtistsDialog] {message}")
        else:
            print(f"[FavArtistsDialog] {message}")

    def _show_content_elements(self, show):
        """Helper to show/hide content-related widgets."""
        self.search_input.setVisible(show)
        self.artist_list_widget.setVisible(show)

    def _fetch_favorite_artists(self):
        """Fetch favorites from both sites synchronously and populate the list.

        A failure on one site does not abort the other; all errors are
        collected and summarized in the status label.
        """
        kemono_fav_url = "https://kemono.su/api/v1/account/favorites?type=artist"
        coomer_fav_url = "https://coomer.su/api/v1/account/favorites?type=artist"

        self.all_fetched_artists = []
        fetched_any_successfully = False
        errors_occurred = []
        any_cookies_loaded_successfully_for_any_source = False

        api_sources = [
            {"name": "Kemono.su", "url": kemono_fav_url, "domain": "kemono.su"},
            {"name": "Coomer.su", "url": coomer_fav_url, "domain": "coomer.su"}
        ]

        for source in api_sources:
            self._logger(f"Attempting to fetch favorite artists from: {source['name']} ({source['url']})")
            self.status_label.setText(self._tr("fav_artists_loading_from_source_status", "⏳ Loading favorites from {source_name}...").format(source_name=source['name']))
            QCoreApplication.processEvents()  # keep the UI responsive during this blocking fetch

            cookies_dict_for_source = None
            if self.cookies_config['use_cookie']:
                cookies_dict_for_source = prepare_cookies_for_request(
                    True,
                    self.cookies_config['cookie_text'],
                    self.cookies_config['selected_cookie_file'],
                    self.cookies_config['app_base_dir'],
                    self._logger,
                    target_domain=source['domain']
                )
                if cookies_dict_for_source:
                    any_cookies_loaded_successfully_for_any_source = True
                else:
                    self._logger(f"Warning ({source['name']}): Cookies enabled but could not be loaded for this domain. Fetch might fail if cookies are required.")
            try:
                headers = {'User-Agent': 'Mozilla/5.0'}
                response = requests.get(source['url'], headers=headers, cookies=cookies_dict_for_source, timeout=20)
                response.raise_for_status()
                artists_data_from_api = response.json()

                if not isinstance(artists_data_from_api, list):
                    error_msg = f"Error ({source['name']}): API did not return a list of artists (got {type(artists_data_from_api)})."
                    self._logger(error_msg)
                    errors_occurred.append(error_msg)
                    continue

                processed_artists_from_source = 0
                for artist_entry in artists_data_from_api:
                    artist_id = artist_entry.get("id")
                    artist_name = html.unescape(artist_entry.get("name", "Unknown Artist").strip())
                    artist_service_platform = artist_entry.get("service")

                    if artist_id and artist_name and artist_service_platform:
                        artist_page_domain = self._get_domain_for_service(artist_service_platform)
                        full_url = f"https://{artist_page_domain}/{artist_service_platform}/user/{artist_id}"

                        self.all_fetched_artists.append({
                            'name': artist_name,
                            'url': full_url,
                            'service': artist_service_platform,
                            'id': artist_id,
                            '_source_api': source['name']
                        })
                        processed_artists_from_source += 1
                    else:
                        self._logger(f"Warning ({source['name']}): Skipping favorite artist entry due to missing data: {artist_entry}")

                if processed_artists_from_source > 0:
                    fetched_any_successfully = True
                    self._logger(f"Fetched {processed_artists_from_source} artists from {source['name']}.")

            except requests.exceptions.RequestException as e:
                error_msg = f"Error fetching favorites from {source['name']}: {e}"
                self._logger(error_msg)
                errors_occurred.append(error_msg)
            except Exception as e:
                error_msg = f"An unexpected error occurred with {source['name']}: {e}"
                self._logger(error_msg)
                errors_occurred.append(error_msg)

        if self.cookies_config['use_cookie'] and not any_cookies_loaded_successfully_for_any_source:
            self.status_label.setText(self._tr("fav_artists_cookies_required_status", "Error: Cookies enabled but could not be loaded for any source."))
            self._logger("Error: Cookies enabled but no cookies loaded for any source. Showing help dialog.")
            cookie_help_dialog = CookieHelpDialog(self)
            cookie_help_dialog.exec_()
            self.download_button.setEnabled(False)
            if not fetched_any_successfully:
                errors_occurred.append("Cookies enabled but could not be loaded for any API source.")

        # De-duplicate favorites that appear on both sites: the first
        # occurrence (Kemono.su first, then Coomer.su) wins.
        unique_artists_map = {}
        for artist in self.all_fetched_artists:
            key = (artist['service'].lower(), str(artist['id']).lower())
            if key not in unique_artists_map:
                unique_artists_map[key] = artist
        self.all_fetched_artists = list(unique_artists_map.values())

        self.all_fetched_artists.sort(key=lambda x: x['name'].lower())
        self._populate_artist_list_widget()

        if fetched_any_successfully and self.all_fetched_artists:
            self.status_label.setText(self._tr("fav_artists_found_status", "Found {count} total favorite artist(s).").format(count=len(self.all_fetched_artists)))
            self._show_content_elements(True)
            self.download_button.setEnabled(True)
        elif not fetched_any_successfully and not errors_occurred:
            self.status_label.setText(self._tr("fav_artists_none_found_status", "No favorite artists found on Kemono.su or Coomer.su."))
            self._show_content_elements(False)
            self.download_button.setEnabled(False)
        else:
            final_error_message = self._tr("fav_artists_failed_status", "Failed to fetch favorites.")
            if errors_occurred:
                final_error_message += " Errors: " + "; ".join(errors_occurred)
            self.status_label.setText(final_error_message)
            self._show_content_elements(False)
            self.download_button.setEnabled(False)
        if fetched_any_successfully and not self.all_fetched_artists:
            self.status_label.setText(self._tr("fav_artists_no_favorites_after_processing", "No favorite artists found after processing."))

    def _populate_artist_list_widget(self, artists_to_display=None):
        """Fill the list widget; defaults to the full fetched set."""
        self.artist_list_widget.clear()
        source_list = artists_to_display if artists_to_display is not None else self.all_fetched_artists
        for artist_data in source_list:
            item = QListWidgetItem(f"{artist_data['name']} ({artist_data.get('service', 'N/A').capitalize()})")
            item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
            item.setCheckState(Qt.Unchecked)
            item.setData(Qt.UserRole, artist_data)
            self.artist_list_widget.addItem(item)

    def _filter_artist_list_display(self):
        """Filter the visible list by the search box (matches name or URL)."""
        search_text = self.search_input.text().lower().strip()
        if not search_text:
            self._populate_artist_list_widget()
            return

        filtered_artists = [
            artist for artist in self.all_fetched_artists
            if search_text in artist['name'].lower() or search_text in artist['url'].lower()
        ]
        self._populate_artist_list_widget(filtered_artists)

    def _select_all_items(self):
        """Check every row currently shown."""
        for i in range(self.artist_list_widget.count()):
            self.artist_list_widget.item(i).setCheckState(Qt.Checked)

    def _deselect_all_items(self):
        """Uncheck every row currently shown."""
        for i in range(self.artist_list_widget.count()):
            self.artist_list_widget.item(i).setCheckState(Qt.Unchecked)

    def _accept_selection_action(self):
        """Collect checked artists into selected_artists_data and accept the dialog."""
        self.selected_artists_data = []
        for i in range(self.artist_list_widget.count()):
            item = self.artist_list_widget.item(i)
            if item.checkState() == Qt.Checked:
                self.selected_artists_data.append(item.data(Qt.UserRole))

        if not self.selected_artists_data:
            QMessageBox.information(self, "No Selection", "Please select at least one artist to download.")
            return
        self.accept()

    def get_selected_artists(self):
        """Return the artists chosen when the dialog was accepted ([] if cancelled)."""
        return self.selected_artists_data
|
||||
629
src/ui/dialogs/FavoritePostsDialog.py
Normal file
629
src/ui/dialogs/FavoritePostsDialog.py
Normal file
@ -0,0 +1,629 @@
|
||||
# --- Standard Library Imports ---
|
||||
import html
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
import time
|
||||
import traceback
|
||||
import json
|
||||
import re
|
||||
from collections import defaultdict
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
import requests
|
||||
from PyQt5.QtCore import QCoreApplication, Qt, pyqtSignal, QThread
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QLineEdit, QListWidget,
|
||||
QListWidgetItem, QMessageBox, QPushButton, QVBoxLayout, QProgressBar,
|
||||
QWidget, QCheckBox
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..assets import get_app_icon_object
|
||||
from ...utils.network_utils import prepare_cookies_for_request
|
||||
# Corrected Import: Import CookieHelpDialog directly from its own module
|
||||
from .CookieHelpDialog import CookieHelpDialog
|
||||
from ...core.api_client import download_from_api
|
||||
|
||||
|
||||
class FavoritePostsFetcherThread(QThread):
    """Worker thread that fetches favorite posts from Kemono.su / Coomer.su.

    The result and a terminal status are delivered through the ``finished``
    signal as ``(posts_list, status_key)``; intermediate state flows through
    ``status_update`` (translation keys) and ``progress_bar_update``.
    """
    status_update = pyqtSignal(str)
    progress_bar_update = pyqtSignal(int, int)
    finished = pyqtSignal(list, str)

    def __init__(self, cookies_config, parent_logger_func, target_domain_preference=None):
        """
        Args:
            cookies_config (dict): Cookie settings with keys ``use_cookie``,
                ``cookie_text``, ``selected_cookie_file`` and ``app_base_dir``.
            parent_logger_func (callable): Receives formatted log lines.
            target_domain_preference (str | None): Restrict the fetch to
                "kemono.su" or "coomer.su"; ``None`` fetches from both.
        """
        super().__init__()
        self.cookies_config = cookies_config
        self.parent_logger_func = parent_logger_func
        self.target_domain_preference = target_domain_preference
        self.cancellation_event = threading.Event()
        # Maps API source display names to the identifier embedded in status keys.
        self.error_key_map = {
            "Kemono.su": "kemono_su",
            "Coomer.su": "coomer_su"
        }

    def _logger(self, message):
        """Prefix and forward a log line to the owning dialog's logger."""
        self.parent_logger_func(f"[FavPostsFetcherThread] {message}")

    def run(self):
        """Fetch favorites from the configured sources and emit ``finished``."""
        kemono_fav_posts_url = "https://kemono.su/api/v1/account/favorites?type=post"
        coomer_fav_posts_url = "https://coomer.su/api/v1/account/favorites?type=post"

        all_fetched_posts_temp = []
        error_messages_for_summary = []
        fetched_any_successfully = False
        any_cookies_loaded_successfully_for_any_source = False

        self.status_update.emit("key_fetching_fav_post_list_init")
        self.progress_bar_update.emit(0, 0)  # (0, 0) = indeterminate

        api_sources = [
            {"name": "Kemono.su", "url": kemono_fav_posts_url, "domain": "kemono.su"},
            {"name": "Coomer.su", "url": coomer_fav_posts_url, "domain": "coomer.su"}
        ]

        api_sources_to_try = []
        if self.target_domain_preference:
            self._logger(f"Targeting specific domain for favorites: {self.target_domain_preference}")
            for source_def in api_sources:
                if source_def["domain"] == self.target_domain_preference:
                    api_sources_to_try.append(source_def)
                    break
            if not api_sources_to_try:
                self._logger(f"Warning: Preferred domain '{self.target_domain_preference}' not a recognized API source. Fetching from all.")
                api_sources_to_try = api_sources
        else:
            self._logger("No specific domain preference, or both domains have cookies. Will attempt to fetch from all sources.")
            api_sources_to_try = api_sources

        for source in api_sources_to_try:
            if self.cancellation_event.is_set():
                self.finished.emit([], "KEY_FETCH_CANCELLED_DURING")
                return
            cookies_dict_for_source = None
            if self.cookies_config['use_cookie']:
                cookies_dict_for_source = prepare_cookies_for_request(
                    True,
                    self.cookies_config['cookie_text'],
                    self.cookies_config['selected_cookie_file'],
                    self.cookies_config['app_base_dir'],
                    self._logger,
                    target_domain=source['domain']
                )
                if cookies_dict_for_source:
                    any_cookies_loaded_successfully_for_any_source = True
                else:
                    self._logger(f"Warning ({source['name']}): Cookies enabled but could not be loaded for this domain. Fetch might fail if cookies are required.")

            self._logger(f"Attempting to fetch favorite posts from: {source['name']} ({source['url']})")
            source_key_part = self.error_key_map.get(source['name'], source['name'].lower().replace('.', '_'))
            self.status_update.emit(f"key_fetching_from_source_{source_key_part}")
            # Fix: removed the QCoreApplication.processEvents() call that was here.
            # processEvents() only services the *calling* thread's event loop, so
            # inside this worker QThread it was a misleading no-op; the GUI stays
            # responsive through the queued signal connections instead.

            try:
                headers = {'User-Agent': 'Mozilla/5.0'}
                response = requests.get(source['url'], headers=headers, cookies=cookies_dict_for_source, timeout=20)
                response.raise_for_status()
                posts_data_from_api = response.json()

                if not isinstance(posts_data_from_api, list):
                    err_detail = f"Error ({source['name']}): API did not return a list of posts (got {type(posts_data_from_api)})."
                    self._logger(err_detail)
                    error_messages_for_summary.append(err_detail)
                    continue

                processed_posts_from_source = 0
                for post_entry in posts_data_from_api:
                    post_id = post_entry.get("id")
                    post_title = html.unescape(post_entry.get("title", "Untitled Post").strip())
                    service = post_entry.get("service")
                    creator_id = post_entry.get("user")
                    added_date_str = post_entry.get("added", post_entry.get("published", ""))

                    if post_id and post_title and service and creator_id:
                        all_fetched_posts_temp.append({
                            'post_id': post_id, 'title': post_title, 'service': service,
                            'creator_id': creator_id, 'added_date': added_date_str,
                            '_source_api': source['name']
                        })
                        processed_posts_from_source += 1
                    else:
                        self._logger(f"Warning ({source['name']}): Skipping favorite post entry due to missing data: {post_entry}")

                if processed_posts_from_source > 0:
                    fetched_any_successfully = True
                    self._logger(f"Fetched {processed_posts_from_source} posts from {source['name']}.")

            except requests.exceptions.RequestException as e:
                err_detail = f"Error fetching favorite posts from {source['name']}: {e}"
                self._logger(err_detail)
                error_messages_for_summary.append(err_detail)
                # 401 means the session cookie is missing/expired: abort early so
                # the dialog can show targeted cookie help.
                if e.response is not None and e.response.status_code == 401:
                    self.finished.emit([], "KEY_AUTH_FAILED")
                    self._logger(f"Authorization failed for {source['name']}, emitting KEY_AUTH_FAILED.")
                    return
            except Exception as e:
                err_detail = f"An unexpected error occurred with {source['name']}: {e}"
                self._logger(err_detail)
                error_messages_for_summary.append(err_detail)

        if self.cancellation_event.is_set():
            self.finished.emit([], "KEY_FETCH_CANCELLED_AFTER")
            return

        if self.cookies_config['use_cookie'] and not any_cookies_loaded_successfully_for_any_source:
            if self.target_domain_preference:
                domain_key_part = self.error_key_map.get(self.target_domain_preference, self.target_domain_preference.lower().replace('.', '_'))
                self.finished.emit([], f"KEY_COOKIES_REQUIRED_BUT_NOT_FOUND_FOR_DOMAIN_{domain_key_part}")
                return
            self.finished.emit([], "KEY_COOKIES_REQUIRED_BUT_NOT_FOUND_GENERIC")
            return

        # De-duplicate posts favorited on both sites; first occurrence wins.
        unique_posts_map = {}
        for post in all_fetched_posts_temp:
            key = (post['service'].lower(), str(post['creator_id']).lower(), str(post['post_id']).lower())
            if key not in unique_posts_map:
                unique_posts_map[key] = post
        all_fetched_posts_temp = list(unique_posts_map.values())

        all_fetched_posts_temp.sort(key=lambda x: (x.get('_source_api', '').lower(), x.get('service', '').lower(), str(x.get('creator_id', '')).lower(), (x.get('added_date') or '')))

        if error_messages_for_summary:
            error_summary_str = "; ".join(error_messages_for_summary)
            if not fetched_any_successfully:
                self.finished.emit([], f"KEY_FETCH_FAILED_GENERIC_{error_summary_str[:50]}")
            else:
                self.finished.emit(all_fetched_posts_temp, f"KEY_FETCH_PARTIAL_SUCCESS_{error_summary_str[:50]}")
        elif not all_fetched_posts_temp and not fetched_any_successfully and not self.target_domain_preference:
            self.finished.emit([], "KEY_NO_FAVORITES_FOUND_ALL_PLATFORMS")
        else:
            self.finished.emit(all_fetched_posts_temp, "KEY_FETCH_SUCCESS")
|
||||
|
||||
class PostListItemWidget(QWidget):
    """Row widget used by FavoritePostsDialog: a checkbox plus a rich-text label."""

    def __init__(self, post_data_dict, parent_dialog_ref, parent=None):
        super().__init__(parent)
        self.post_data = post_data_dict
        self.parent_dialog = parent_dialog_ref

        row = QHBoxLayout(self)
        row.setContentsMargins(5, 3, 5, 3)
        row.setSpacing(10)
        self.layout = row

        self.checkbox = QCheckBox()
        row.addWidget(self.checkbox)

        self.info_label = QLabel()
        self.info_label.setWordWrap(True)
        self.info_label.setTextFormat(Qt.RichText)
        row.addWidget(self.info_label, 1)

        self._setup_display_text()

    def _setup_display_text(self):
        """Render the post title (plus optional suffix) as styled rich text."""
        raw_suffix = self.post_data.get('suffix_for_display', "")
        raw_title = self.post_data.get('title', 'Untitled Post')
        safe_suffix = html.escape(raw_suffix)
        safe_title = html.escape(raw_title)

        para_style = "font-size:10.5pt; margin:0; padding:0;"
        title_style = "font-weight:bold; color:#E0E0E0;"
        suffix_style = "color:#999999; font-weight:normal; font-size:9.5pt;"

        title_html = f"<span style='{title_style}'>{safe_title}</span>"
        suffix_html = f"<span style='{suffix_style}'>{safe_suffix}</span>" if safe_suffix else ""
        self.info_label.setText(f"<p style='{para_style}'>{title_html}{suffix_html}</p>")

    def isChecked(self):
        """Mirror the embedded checkbox state."""
        return self.checkbox.isChecked()

    def setCheckState(self, state):
        """Forward a Qt.CheckState to the embedded checkbox."""
        self.checkbox.setCheckState(state)

    def get_post_data(self):
        """Return the raw post dict this row represents."""
        return self.post_data
|
||||
|
||||
class FavoritePostsDialog (QDialog ):
|
||||
"""Dialog to display and select favorite posts."""
|
||||
def __init__(self, parent_app, cookies_config, known_names_list_ref, target_domain_preference=None):
    """Build the dialog, load creator names, then launch the background fetch."""
    super().__init__(parent_app)
    self.parent_app = parent_app
    self.cookies_config = cookies_config
    self.all_fetched_posts = []
    self.selected_posts_data = []
    self.known_names_list_ref = known_names_list_ref
    self.target_domain_preference_for_this_fetch = target_domain_preference
    self.creator_name_cache = {}
    self.displayable_grouped_posts = {}
    self.fetcher_thread = None

    icon = get_app_icon_object()
    if not icon.isNull():
        self.setWindowIcon(icon)

    self.setModal(True)
    self.setMinimumSize(600, 600)
    if hasattr(self.parent_app, 'get_dark_theme'):
        self.setStyleSheet(self.parent_app.get_dark_theme())

    self._init_ui()
    self._load_creator_names_from_file()
    self._retranslate_ui()
    self._start_fetching_favorite_posts()
|
||||
|
||||
def _update_status_label_from_key (self ,status_key ):
|
||||
"""Translates a status key and updates the status label."""
|
||||
|
||||
translated_status =self ._tr (status_key .lower (),status_key )
|
||||
self .status_label .setText (translated_status )
|
||||
|
||||
def _init_ui(self):
    """Create all widgets and wire their signals (texts come from _retranslate_ui)."""
    root = QVBoxLayout(self)

    self.status_label = QLabel()
    self.status_label.setAlignment(Qt.AlignCenter)
    root.addWidget(self.status_label)

    self.progress_bar = QProgressBar()
    self.progress_bar.setTextVisible(False)
    self.progress_bar.setVisible(False)
    root.addWidget(self.progress_bar)

    self.search_input = QLineEdit()
    self.search_input.textChanged.connect(self._filter_post_list_display)
    root.addWidget(self.search_input)

    self.post_list_widget = QListWidget()
    self.post_list_widget.setStyleSheet("""
            QListWidget::item {
                border-bottom: 1px solid #4A4A4A;
                padding-top: 4px;
                padding-bottom: 4px;
            }""")
    self.post_list_widget.setAlternatingRowColors(True)
    root.addWidget(self.post_list_widget)

    buttons = QHBoxLayout()
    self.select_all_button = QPushButton()
    self.select_all_button.clicked.connect(self._select_all_items)
    buttons.addWidget(self.select_all_button)

    self.deselect_all_button = QPushButton()
    self.deselect_all_button.clicked.connect(self._deselect_all_items)
    buttons.addWidget(self.deselect_all_button)

    self.download_button = QPushButton()
    self.download_button.clicked.connect(self._accept_selection_action)
    self.download_button.setEnabled(False)
    self.download_button.setDefault(True)
    buttons.addWidget(self.download_button)

    self.cancel_button = QPushButton()
    self.cancel_button.clicked.connect(self.reject)
    buttons.addWidget(self.cancel_button)

    buttons.addStretch(1)
    root.addLayout(buttons)
|
||||
|
||||
def _tr(self, key, default_text=""):
    """Translate *key* for the app's current language, falling back to *default_text*."""
    if not (callable(get_translation) and self.parent_app):
        return default_text
    return get_translation(self.parent_app.current_selected_language, key, default_text)
|
||||
|
||||
def _retranslate_ui (self ):
|
||||
self .setWindowTitle (self ._tr ("fav_posts_dialog_title","Favorite Posts"))
|
||||
self .status_label .setText (self ._tr ("fav_posts_loading_status","Loading favorite posts..."))
|
||||
self .search_input .setPlaceholderText (self ._tr ("fav_posts_search_placeholder","Search posts (title, creator name, ID, service)..."))
|
||||
self .select_all_button .setText (self ._tr ("fav_posts_select_all_button","Select All"))
|
||||
self .deselect_all_button .setText (self ._tr ("fav_posts_deselect_all_button","Deselect All"))
|
||||
self .download_button .setText (self ._tr ("fav_posts_download_selected_button","Download Selected"))
|
||||
self .cancel_button .setText (self ._tr ("fav_posts_cancel_button","Cancel"))
|
||||
|
||||
def _logger (self ,message ):
|
||||
if hasattr (self .parent_app ,'log_signal')and self .parent_app .log_signal :
|
||||
self .parent_app .log_signal .emit (f"[FavPostsDialog] {message }")
|
||||
else :
|
||||
print (f"[FavPostsDialog] {message }")
|
||||
|
||||
def _load_creator_names_from_file (self ):
|
||||
"""Loads creator id-name-service mappings from creators.txt."""
|
||||
self ._logger ("Attempting to load creators.json for Favorite Posts Dialog.")
|
||||
|
||||
if getattr (sys ,'frozen',False )and hasattr (sys ,'_MEIPASS'):
|
||||
base_path_for_creators =sys ._MEIPASS
|
||||
self ._logger (f" Running bundled. Using _MEIPASS: {base_path_for_creators }")
|
||||
else :
|
||||
base_path_for_creators =self .parent_app .app_base_dir
|
||||
self ._logger (f" Not bundled or _MEIPASS unavailable. Using app_base_dir: {base_path_for_creators }")
|
||||
creators_file_path = os.path.join(base_path_for_creators, "data", "creators.json")
|
||||
self ._logger (f"Full path to creators.json: {creators_file_path }")
|
||||
|
||||
if not os .path .exists (creators_file_path ):
|
||||
self ._logger (f"Warning: 'creators.json' not found at {creators_file_path }. Creator names will not be displayed.")
|
||||
return
|
||||
|
||||
try :
|
||||
with open (creators_file_path ,'r',encoding ='utf-8')as f :
|
||||
loaded_data =json .load (f )
|
||||
|
||||
if isinstance (loaded_data ,list )and len (loaded_data )>0 and isinstance (loaded_data [0 ],list ):
|
||||
creators_list =loaded_data [0 ]
|
||||
elif isinstance (loaded_data ,list )and all (isinstance (item ,dict )for item in loaded_data ):
|
||||
creators_list =loaded_data
|
||||
else :
|
||||
self ._logger (f"Warning: 'creators.json' has an unexpected format. Expected a list of lists or a flat list of creator objects.")
|
||||
return
|
||||
|
||||
for creator_data in creators_list :
|
||||
creator_id =creator_data .get ("id")
|
||||
name =creator_data .get ("name")
|
||||
service =creator_data .get ("service")
|
||||
if creator_id and name and service :
|
||||
self .creator_name_cache [(service .lower (),str (creator_id ))]=name
|
||||
self ._logger (f"Successfully loaded {len (self .creator_name_cache )} creator names from 'creators.json'.")
|
||||
except Exception as e :
|
||||
self ._logger (f"Error loading 'creators.json': {e }")
|
||||
|
||||
def _start_fetching_favorite_posts(self):
    """Disable actions and launch the background thread that fetches favorites."""
    self.download_button.setEnabled(False)
    # Consistency fix: route this status message through the translator like
    # every other status in this dialog (the default text preserves the
    # previous hard-coded English string).
    self.status_label.setText(self._tr("fav_posts_initializing_fetch_status",
                                       "Initializing favorite posts fetch..."))

    self.fetcher_thread = FavoritePostsFetcherThread(
        self.cookies_config,
        self.parent_app.log_signal.emit,
        target_domain_preference=self.target_domain_preference_for_this_fetch
    )
    self.fetcher_thread.status_update.connect(self._update_status_label_from_key)
    self.fetcher_thread.finished.connect(self._on_fetch_completed)
    self.fetcher_thread.progress_bar_update.connect(self._set_progress_bar_value)
    self.progress_bar.setVisible(True)
    self.fetcher_thread.start()
|
||||
|
||||
def _set_progress_bar_value (self ,value ,maximum ):
|
||||
if maximum ==0 :
|
||||
self .progress_bar .setRange (0 ,0 )
|
||||
self .progress_bar .setValue (0 )
|
||||
else :
|
||||
self .progress_bar .setRange (0 ,maximum )
|
||||
self .progress_bar .setValue (value )
|
||||
|
||||
def _on_fetch_completed(self, fetched_posts_list, status_key):
    """Handle completion of the favorite-posts fetcher thread.

    Args:
        fetched_posts_list (list[dict]): Posts returned by the fetcher
            (may be non-empty even on partial success).
        status_key (str): Outcome code from the fetcher, e.g.
            "KEY_FETCH_SUCCESS", "KEY_AUTH_FAILED",
            "KEY_FETCH_PARTIAL_SUCCESS_*", "KEY_FETCH_CANCELLED*".

    Depending on the key, this either surfaces an error/status message
    (cookie help dialog, auth warning, cancellation notice) and bails out,
    or resolves creator display names against creators.json and populates
    the post list widget.
    """
    self.progress_bar.setVisible(False)

    # Defaults for the non-success paths; the branch below overrides them.
    proceed_to_display_posts = False
    show_error_message_box = False
    message_box_title_key = "fav_posts_fetch_error_title"
    message_box_text_key = "fav_posts_fetch_error_message"
    message_box_params = {'domain': self.target_domain_preference_for_this_fetch or "platform", 'error_message_part': ""}
    status_label_text_key = None

    if status_key == "KEY_FETCH_SUCCESS":
        proceed_to_display_posts = True
    elif status_key and status_key.startswith("KEY_FETCH_PARTIAL_SUCCESS_") and fetched_posts_list:
        # Partial success that still produced posts: log the detail but show them.
        displayable_detail = status_key.replace("KEY_FETCH_PARTIAL_SUCCESS_", "").replace("_", " ")
        self._logger(f"Partial success with posts: {status_key} -> {displayable_detail}")
        proceed_to_display_posts = True
    elif status_key:
        specific_domain_msg_part = f" for {self.target_domain_preference_for_this_fetch}" if self.target_domain_preference_for_this_fetch else ""

        if status_key.startswith("KEY_COOKIES_REQUIRED_BUT_NOT_FOUND_FOR_DOMAIN_") or status_key == "KEY_COOKIES_REQUIRED_BUT_NOT_FOUND_GENERIC":
            # Missing cookies: point the user at the cookie setup help.
            status_label_text_key = "fav_posts_cookies_required_error"
            self._logger(f"Cookie error: {status_key}. Showing help dialog.")
            cookie_help_dialog = CookieHelpDialog(self)
            cookie_help_dialog.exec_()
        elif status_key == "KEY_AUTH_FAILED":
            # Cookies present but rejected: warn, then show cookie help.
            status_label_text_key = "fav_posts_auth_failed_title"
            self._logger(f"Auth error: {status_key}. Showing help dialog.")
            QMessageBox.warning(self, self._tr("fav_posts_auth_failed_title", "Authorization Failed (Posts)"),
                                self._tr("fav_posts_auth_failed_message_generic", "...").format(domain_specific_part=specific_domain_msg_part))
            cookie_help_dialog = CookieHelpDialog(self)
            cookie_help_dialog.exec_()
        elif status_key == "KEY_NO_FAVORITES_FOUND_ALL_PLATFORMS":
            status_label_text_key = "fav_posts_no_posts_found_status"
            self._logger(status_key)
        elif status_key.startswith("KEY_FETCH_CANCELLED"):
            status_label_text_key = "fav_posts_fetch_cancelled_status"
            self._logger(status_key)
        else:
            # Unrecognized failure key: derive a human-readable detail from it.
            displayable_error_detail = status_key
            if status_key.startswith("KEY_FETCH_FAILED_GENERIC_"):
                displayable_error_detail = status_key.replace("KEY_FETCH_FAILED_GENERIC_", "").replace("_", " ")
            elif status_key.startswith("KEY_FETCH_PARTIAL_SUCCESS_"):
                # Partial success but with no posts to show falls through to here.
                displayable_error_detail = status_key.replace("KEY_FETCH_PARTIAL_SUCCESS_", "Partial success but no posts: ").replace("_", " ")

            message_box_params['error_message_part'] = f":\n\n{displayable_error_detail}" if displayable_error_detail else ""
            status_label_text_key = "fav_posts_fetch_error_message"
            show_error_message_box = True
            self._logger(f"Fetch error: {status_key} -> {displayable_error_detail}")

    if status_label_text_key:
        self.status_label.setText(self._tr(status_label_text_key, status_label_text_key).format(**message_box_params))
    if show_error_message_box:
        QMessageBox.critical(self, self._tr(message_box_title_key), self._tr(message_box_text_key).format(**message_box_params))
        self.download_button.setEnabled(False)
        return

    if not proceed_to_display_posts:
        # Reaching here with no status text means status_key was empty/None.
        if not status_label_text_key:
            self.status_label.setText(self._tr("fav_posts_cookies_required_error", "Error: Cookies are required for favorite posts but could not be loaded."))
        self.download_button.setEnabled(False)
        return

    if not self.creator_name_cache:
        self._logger("Warning: Creator name cache is empty. Names will not be resolved from creators.json. Displaying IDs instead.")
    else:
        self._logger(f"Creator name cache has {len(self.creator_name_cache)} entries. Attempting to resolve names...")
        sample_keys = list(self.creator_name_cache.keys())[:3]
        if sample_keys:
            self._logger(f"Sample keys from creator_name_cache: {sample_keys}")

    # Resolve display names; cache keys are (service_lowercase, creator_id_str).
    processed_one_missing_log = False
    for post_entry in fetched_posts_list:
        service_from_post = post_entry.get('service', '')
        creator_id_from_post = post_entry.get('creator_id', '')

        lookup_key_service = service_from_post.lower()
        lookup_key_id = str(creator_id_from_post)
        lookup_key_tuple = (lookup_key_service, lookup_key_id)

        resolved_name = self.creator_name_cache.get(lookup_key_tuple)

        if resolved_name:
            post_entry['creator_name_resolved'] = resolved_name
        else:
            # Fall back to the raw ID; log only the first miss to avoid spam.
            post_entry['creator_name_resolved'] = str(creator_id_from_post)
            if not processed_one_missing_log and self.creator_name_cache:
                self._logger(f"Debug: Name not found for key {lookup_key_tuple}. Using ID '{creator_id_from_post}'.")
                processed_one_missing_log = True

    self.all_fetched_posts = fetched_posts_list

    if not self.all_fetched_posts:
        self.status_label.setText(self._tr("fav_posts_no_posts_found_status", "No favorite posts found."))
        self.download_button.setEnabled(False)
        return

    try:
        self._populate_post_list_widget()
        self.status_label.setText(self._tr("fav_posts_found_status", "{count} favorite post(s) found.").format(count=len(self.all_fetched_posts)))
        self.download_button.setEnabled(True)
    except Exception as e:
        # UI population failed; report but keep the dialog alive.
        self.status_label.setText(self._tr("fav_posts_display_error_status", "Error displaying posts: {error}").format(error=str(e)))
        self._logger(f"Error during _populate_post_list_widget: {e}\n{traceback.format_exc(limit=3)}")
        QMessageBox.critical(self, self._tr("fav_posts_ui_error_title", "UI Error"), self._tr("fav_posts_ui_error_message", "Could not display favorite posts: {error}").format(error=str(e)))
        self.download_button.setEnabled(False)
def _find_best_known_name_match_in_title (self ,title_raw ):
|
||||
if not title_raw or not self .known_names_list_ref :
|
||||
return None
|
||||
|
||||
title_lower =title_raw .lower ()
|
||||
best_match_known_name_primary =None
|
||||
longest_match_len =0
|
||||
|
||||
for known_entry in self .known_names_list_ref :
|
||||
aliases_to_check =set ()
|
||||
for alias_val in known_entry .get ("aliases",[]):
|
||||
aliases_to_check .add (alias_val )
|
||||
if not known_entry .get ("is_group",False ):
|
||||
aliases_to_check .add (known_entry ["name"])
|
||||
sorted_aliases_for_entry =sorted (list (aliases_to_check ),key =len ,reverse =True )
|
||||
|
||||
for alias in sorted_aliases_for_entry :
|
||||
alias_lower =alias .lower ()
|
||||
if not alias_lower :
|
||||
continue
|
||||
if re .search (r'\b'+re .escape (alias_lower )+r'\b',title_lower ):
|
||||
if len (alias_lower )>longest_match_len :
|
||||
longest_match_len =len (alias_lower )
|
||||
best_match_known_name_primary =known_entry ["name"]
|
||||
break
|
||||
return best_match_known_name_primary
|
||||
|
||||
def _populate_post_list_widget(self, posts_to_display=None):
    """Rebuild the post list widget, grouped by (service, creator).

    Args:
        posts_to_display (list[dict] | None): Posts to render; ``None``
            means render ``self.all_fetched_posts`` (the unfiltered set).

    Each group gets a bold, non-selectable "artist header" row followed by
    one checkable row per post (newest first by 'added_date'). The post
    dict is stored on its row via Qt.UserRole for later retrieval.
    """
    self.post_list_widget.clear()

    source_list_for_grouping = posts_to_display if posts_to_display is not None else self.all_fetched_posts

    # Group posts by (service, creator_id).
    grouped_posts = {}
    for post in source_list_for_grouping:
        service = post.get('service', 'unknown_service')
        creator_id = post.get('creator_id', 'unknown_id')
        group_key = (service, creator_id)
        if group_key not in grouped_posts:
            grouped_posts[group_key] = []
        grouped_posts[group_key].append(post)

    # Stable, case-insensitive ordering of the artist groups.
    sorted_group_keys = sorted(grouped_posts.keys(), key=lambda x: (x[0].lower(), x[1].lower()))

    # Within each group, newest posts first ('added_date' may be missing/None).
    self.displayable_grouped_posts = {
        key: sorted(grouped_posts[key], key=lambda p: (p.get('added_date') or ''), reverse=True)
        for key in sorted_group_keys
    }

    for service, creator_id_val in sorted_group_keys:
        # Prefer the human-readable name from creators.json; fall back to the ID.
        creator_name_display = self.creator_name_cache.get(
            (service.lower(), str(creator_id_val)),
            str(creator_id_val)
        )
        artist_header_display_text = f"{creator_name_display} ({service.capitalize()} / {creator_id_val})"
        # Header row: bold, slightly larger, non-interactive.
        artist_header_item = QListWidgetItem(f"🎨 {artist_header_display_text}")
        artist_header_item.setFlags(Qt.NoItemFlags)
        font = artist_header_item.font()
        font.setBold(True)
        font.setPointSize(font.pointSize() + 1)
        artist_header_item.setFont(font)
        artist_header_item.setForeground(Qt.cyan)
        self.post_list_widget.addItem(artist_header_item)

        for post_data in self.displayable_grouped_posts[(service, creator_id_val)]:
            post_title_raw = post_data.get('title', 'Untitled Post')
            found_known_name_primary = self._find_best_known_name_match_in_title(post_title_raw)

            plain_text_title_for_list_item = post_title_raw
            if found_known_name_primary:
                # Tag titles that match an entry in Known.txt.
                suffix_text = f" [Known - {found_known_name_primary}]"
                post_data['suffix_for_display'] = suffix_text
                plain_text_title_for_list_item = post_title_raw + suffix_text
            else:
                # Remove any stale tag from a previous populate pass.
                post_data.pop('suffix_for_display', None)

            list_item = QListWidgetItem(self.post_list_widget)
            list_item.setText(plain_text_title_for_list_item)
            list_item.setFlags(list_item.flags() | Qt.ItemIsUserCheckable)
            list_item.setCheckState(Qt.Unchecked)
            # Attach the post dict so selection handlers can recover it.
            list_item.setData(Qt.UserRole, post_data)
            self.post_list_widget.addItem(list_item)
def _filter_post_list_display (self ):
|
||||
search_text =self .search_input .text ().lower ().strip ()
|
||||
if not search_text :
|
||||
self ._populate_post_list_widget (self .all_fetched_posts )
|
||||
return
|
||||
|
||||
filtered_posts_to_group =[]
|
||||
for post in self .all_fetched_posts :
|
||||
matches_post_title =search_text in post .get ('title','').lower ()
|
||||
matches_creator_name =search_text in post .get ('creator_name_resolved','').lower ()
|
||||
matches_creator_id =search_text in post .get ('creator_id','').lower ()
|
||||
matches_service =search_text in post ['service'].lower ()
|
||||
|
||||
if matches_post_title or matches_creator_name or matches_creator_id or matches_service :
|
||||
filtered_posts_to_group .append (post )
|
||||
|
||||
self ._populate_post_list_widget (filtered_posts_to_group )
|
||||
|
||||
def _select_all_items(self):
    """Mark every user-checkable row in the post list as checked.

    Artist header rows carry Qt.NoItemFlags and are skipped.
    """
    for row in range(self.post_list_widget.count()):
        entry = self.post_list_widget.item(row)
        if entry and (entry.flags() & Qt.ItemIsUserCheckable):
            entry.setCheckState(Qt.Checked)
def _deselect_all_items(self):
    """Clear the check mark from every user-checkable row in the post list.

    Artist header rows carry Qt.NoItemFlags and are skipped.
    """
    for row in range(self.post_list_widget.count()):
        entry = self.post_list_widget.item(row)
        if entry and (entry.flags() & Qt.ItemIsUserCheckable):
            entry.setCheckState(Qt.Unchecked)
def _accept_selection_action(self):
    """Collect the checked posts and accept the dialog.

    Gathers the post dicts stored under Qt.UserRole on every checked row
    into ``self.selected_posts_data``. If nothing is checked, shows an
    informational message and leaves the dialog open.
    """
    checked_posts = []
    for row in range(self.post_list_widget.count()):
        entry = self.post_list_widget.item(row)
        if entry and entry.checkState() == Qt.Checked:
            checked_posts.append(entry.data(Qt.UserRole))
    self.selected_posts_data = checked_posts

    if not checked_posts:
        QMessageBox.information(
            self,
            self._tr("fav_posts_no_selection_title", "No Selection"),
            self._tr("fav_posts_no_selection_message", "Please select at least one post to download."),
        )
        return
    self.accept()
def get_selected_posts(self):
    """Return the list of post dicts the user confirmed for download."""
    return self.selected_posts_data
||||
202
src/ui/dialogs/FutureSettingsDialog.py
Normal file
202
src/ui/dialogs/FutureSettingsDialog.py
Normal file
@ -0,0 +1,202 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt, QStandardPaths
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout,
|
||||
QGroupBox, QComboBox, QMessageBox
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# This assumes the new project structure is in place.
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
from ...config.constants import (
|
||||
THEME_KEY, LANGUAGE_KEY, DOWNLOAD_LOCATION_KEY
|
||||
)
|
||||
|
||||
|
||||
class FutureSettingsDialog(QDialog):
    """
    A dialog for managing application-wide settings like theme, language,
    and saving the default download path.
    """

    def __init__(self, parent_app_ref, parent=None):
        """
        Initializes the dialog.

        Args:
            parent_app_ref (DownloaderApp): A reference to the main application window.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app_ref
        self.setModal(True)

        # --- Basic Window Setup ---
        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        # Set window size dynamically: scale a 380x250 baseline by how the
        # screen height compares to a 768px-tall reference display.
        screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
        scale_factor = screen_height / 768.0
        base_min_w, base_min_h = 380, 250
        scaled_min_w = int(base_min_w * scale_factor)
        scaled_min_h = int(base_min_h * scale_factor)
        self.setMinimumSize(scaled_min_w, scaled_min_h)

        # --- Initialize UI and Apply Theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Initializes all UI components and layouts for the dialog."""
        layout = QVBoxLayout(self)

        # --- Appearance Settings ---
        self.appearance_group_box = QGroupBox()
        appearance_layout = QVBoxLayout(self.appearance_group_box)
        self.theme_toggle_button = QPushButton()
        self.theme_toggle_button.clicked.connect(self._toggle_theme)
        appearance_layout.addWidget(self.theme_toggle_button)
        layout.addWidget(self.appearance_group_box)

        # --- Language Settings ---
        self.language_group_box = QGroupBox()
        language_group_layout = QVBoxLayout(self.language_group_box)
        self.language_selection_layout = QHBoxLayout()
        self.language_label = QLabel()
        self.language_selection_layout.addWidget(self.language_label)
        self.language_combo_box = QComboBox()
        self.language_combo_box.currentIndexChanged.connect(self._language_selection_changed)
        self.language_selection_layout.addWidget(self.language_combo_box, 1)
        language_group_layout.addLayout(self.language_selection_layout)
        layout.addWidget(self.language_group_box)

        # --- Download Settings ---
        self.download_settings_group_box = QGroupBox()
        download_settings_layout = QVBoxLayout(self.download_settings_group_box)
        self.save_path_button = QPushButton()
        self.save_path_button.clicked.connect(self._save_download_path)
        download_settings_layout.addWidget(self.save_path_button)
        layout.addWidget(self.download_settings_group_box)

        layout.addStretch(1)

        # --- OK Button ---
        # All widget texts are assigned later in _retranslate_ui().
        self.ok_button = QPushButton()
        self.ok_button.clicked.connect(self.accept)
        layout.addWidget(self.ok_button, 0, Qt.AlignRight | Qt.AlignBottom)

    def _tr(self, key, default_text=""):
        """Helper to get translation based on the main application's current language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Sets the text for all translatable UI elements."""
        self.setWindowTitle(self._tr("settings_dialog_title", "Settings"))
        self.appearance_group_box.setTitle(self._tr("appearance_group_title", "Appearance"))
        self.language_group_box.setTitle(self._tr("language_group_title", "Language Settings"))
        self.download_settings_group_box.setTitle(self._tr("settings_download_group_title", "Download Settings"))
        self.language_label.setText(self._tr("language_label", "Language:"))
        self._update_theme_toggle_button_text()
        self._populate_language_combo_box()

        self.save_path_button.setText(self._tr("settings_save_path_button", "Save Current Download Path"))
        self.save_path_button.setToolTip(self._tr("settings_save_path_tooltip", "Save the current 'Download Location' for future sessions."))
        self.ok_button.setText(self._tr("ok_button", "OK"))

    def _apply_theme(self):
        """Applies the current theme from the parent application."""
        if self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())
        else:
            # An empty stylesheet falls back to the default (light) platform style.
            self.setStyleSheet("")

    def _update_theme_toggle_button_text(self):
        """Updates the theme button text and tooltip based on the current theme."""
        if self.parent_app.current_theme == "dark":
            self.theme_toggle_button.setText(self._tr("theme_toggle_light", "Switch to Light Mode"))
            self.theme_toggle_button.setToolTip(self._tr("theme_tooltip_light", "Change the application appearance to light."))
        else:
            self.theme_toggle_button.setText(self._tr("theme_toggle_dark", "Switch to Dark Mode"))
            self.theme_toggle_button.setToolTip(self._tr("theme_tooltip_dark", "Change the application appearance to dark."))

    def _toggle_theme(self):
        """Toggles the application theme and updates the UI."""
        new_theme = "light" if self.parent_app.current_theme == "dark" else "dark"
        self.parent_app.apply_theme(new_theme)
        # Refresh button texts and restyle this dialog to match the new theme.
        self._retranslate_ui()
        self._apply_theme()

    def _populate_language_combo_box(self):
        """Populates the language dropdown with available languages."""
        # Block signals so repopulating does not fire _language_selection_changed.
        self.language_combo_box.blockSignals(True)
        self.language_combo_box.clear()
        languages = [
            ("en", "English"),
            ("ja", "日本語 (Japanese)"),
            ("fr", "Français (French)"),
            ("de", "Deutsch (German)"),
            ("es", "Español (Spanish)"),
            ("pt", "Português (Portuguese)"),
            ("ru", "Русский (Russian)"),
            ("zh_CN", "简体中文 (Simplified Chinese)"),
            ("zh_TW", "繁體中文 (Traditional Chinese)"),
            ("ko", "한국어 (Korean)")
        ]
        for lang_code, lang_name in languages:
            self.language_combo_box.addItem(lang_name, lang_code)
            if self.parent_app.current_selected_language == lang_code:
                # Select the entry that was just appended (it is the last one).
                self.language_combo_box.setCurrentIndex(self.language_combo_box.count() - 1)
        self.language_combo_box.blockSignals(False)

    def _language_selection_changed(self, index):
        """Handles the user selecting a new language."""
        selected_lang_code = self.language_combo_box.itemData(index)
        if selected_lang_code and selected_lang_code != self.parent_app.current_selected_language:
            # Persist the choice immediately.
            self.parent_app.current_selected_language = selected_lang_code
            self.parent_app.settings.setValue(LANGUAGE_KEY, selected_lang_code)
            self.parent_app.settings.sync()

            self._retranslate_ui()

            # Offer an immediate restart so the new language applies everywhere.
            msg_box = QMessageBox(self)
            msg_box.setIcon(QMessageBox.Information)
            msg_box.setWindowTitle(self._tr("language_change_title", "Language Changed"))
            msg_box.setText(self._tr("language_change_message", "A restart is required..."))
            msg_box.setInformativeText(self._tr("language_change_informative", "Would you like to restart now?"))
            restart_button = msg_box.addButton(self._tr("restart_now_button", "Restart Now"), QMessageBox.ApplyRole)
            ok_button = msg_box.addButton(self._tr("ok_button", "OK"), QMessageBox.AcceptRole)
            msg_box.setDefaultButton(ok_button)
            msg_box.exec_()

            if msg_box.clickedButton() == restart_button:
                self.parent_app._request_restart_application()

    def _save_download_path(self):
        """Saves the current download path from the main window to settings."""
        if hasattr(self.parent_app, 'dir_input') and self.parent_app.dir_input:
            current_path = self.parent_app.dir_input.text().strip()
            if current_path:
                if os.path.isdir(current_path):
                    self.parent_app.settings.setValue(DOWNLOAD_LOCATION_KEY, current_path)
                    self.parent_app.settings.sync()
                    QMessageBox.information(self,
                                            self._tr("settings_save_path_success_title", "Path Saved"),
                                            self._tr("settings_save_path_success_message", "Download location '{path}' saved.").format(path=current_path))
                else:
                    QMessageBox.warning(self,
                                        self._tr("settings_save_path_invalid_title", "Invalid Path"),
                                        self._tr("settings_save_path_invalid_message", "The path '{path}' is not a valid directory.").format(path=current_path))
            else:
                QMessageBox.warning(self,
                                    self._tr("settings_save_path_empty_title", "Empty Path"),
                                    self._tr("settings_save_path_empty_message", "Download location cannot be empty."))
        else:
            QMessageBox.critical(self, "Error", "Could not access download path input from main application.")
||||
192
src/ui/dialogs/HelpGuideDialog.py
Normal file
192
src/ui/dialogs/HelpGuideDialog.py
Normal file
@ -0,0 +1,192 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import sys
|
||||
|
||||
# --- PyQt5 Imports ---
from PyQt5.QtCore import QUrl, QSize, Qt
from PyQt5.QtGui import QDesktopServices, QIcon
from PyQt5.QtWidgets import (
    QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout,
    QStackedWidget, QScrollArea, QFrame, QWidget
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class TourStepWidget(QWidget):
    """
    A custom widget representing a single step or page in the feature guide.
    It neatly formats a title and its corresponding content.
    """

    def __init__(self, title_text, content_text, parent=None):
        """Build the step page: a centered title above scrollable rich-text content.

        Args:
            title_text (str): Heading shown at the top of the page.
            content_text (str): Body text; rendered as rich text (HTML allowed)
                and links open in the external browser.
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        layout = QVBoxLayout(self)
        layout.setContentsMargins(20, 20, 20, 20)
        layout.setSpacing(10)

        title_label = QLabel(title_text)
        title_label.setAlignment(Qt.AlignCenter)
        title_label.setStyleSheet("font-size: 18px; font-weight: bold; color: #E0E0E0; padding-bottom: 15px;")
        layout.addWidget(title_label)

        # Long content scrolls vertically only; horizontal scrolling is
        # disabled because the content label word-wraps.
        scroll_area = QScrollArea()
        scroll_area.setWidgetResizable(True)
        scroll_area.setFrameShape(QFrame.NoFrame)
        scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        scroll_area.setStyleSheet("background-color: transparent;")

        content_label = QLabel(content_text)
        content_label.setWordWrap(True)
        content_label.setAlignment(Qt.AlignLeft | Qt.AlignTop)
        content_label.setTextFormat(Qt.RichText)
        content_label.setOpenExternalLinks(True)  # Allow opening links in the content
        content_label.setStyleSheet("font-size: 11pt; color: #C8C8C8; line-height: 1.8;")
        scroll_area.setWidget(content_label)
        # Stretch factor 1: the scroll area takes all remaining vertical space.
        layout.addWidget(scroll_area, 1)
|
||||
|
||||
class HelpGuideDialog(QDialog):
    """A multi-page dialog for displaying the feature guide."""

    def __init__(self, steps_data, parent_app, parent=None):
        """Initialize the guide dialog.

        Args:
            steps_data (list[tuple[str, str]]): (title, content) pairs, one
                per guide page.
            parent_app: Reference to the main application window (used for
                theming, translations, and centering).
            parent (QWidget, optional): The parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.current_step = 0
        self.steps_data = steps_data
        self.parent_app = parent_app

        app_icon = get_app_icon_object()
        if app_icon and not app_icon.isNull():
            self.setWindowIcon(app_icon)

        self.setModal(True)
        self.setFixedSize(650, 600)

        # Use the application's dark stylesheet when available...
        current_theme_style = ""
        if hasattr(self.parent_app, 'current_theme') and self.parent_app.current_theme == "dark":
            if hasattr(self.parent_app, 'get_dark_theme'):
                current_theme_style = self.parent_app.get_dark_theme()

        # ...otherwise fall back to a built-in dark look for this dialog only.
        self.setStyleSheet(current_theme_style if current_theme_style else """
            QDialog { background-color: #2E2E2E; border: 1px solid #5A5A5A; }
            QLabel { color: #E0E0E0; }
            QPushButton { background-color: #555; color: #F0F0F0; border: 1px solid #6A6A6A; padding: 8px 15px; border-radius: 4px; min-height: 25px; font-size: 11pt; }
            QPushButton:hover { background-color: #656565; }
            QPushButton:pressed { background-color: #4A4A4A; }
        """)
        self._init_ui()
        if self.parent_app:
            # Center this dialog over the main window.
            self.move(self.parent_app.geometry().center() - self.rect().center())

    def _tr(self, key, default_text=""):
        """Helper to get translation based on current app language."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _init_ui(self):
        """Build the stacked guide pages and the bottom navigation/social bar."""
        main_layout = QVBoxLayout(self)
        main_layout.setContentsMargins(0, 0, 0, 0)
        main_layout.setSpacing(0)

        # One TourStepWidget page per (title, content) pair.
        self.stacked_widget = QStackedWidget()
        main_layout.addWidget(self.stacked_widget, 1)

        self.tour_steps_widgets = []
        for title, content in self.steps_data:
            step_widget = TourStepWidget(title, content)
            self.tour_steps_widgets.append(step_widget)
            self.stacked_widget.addWidget(step_widget)

        self.setWindowTitle(self._tr("help_guide_dialog_title", "Kemono Downloader - Feature Guide"))

        buttons_layout = QHBoxLayout()
        buttons_layout.setContentsMargins(15, 10, 15, 15)
        buttons_layout.setSpacing(10)

        self.back_button = QPushButton(self._tr("tour_dialog_back_button", "Back"))
        self.back_button.clicked.connect(self._previous_step)
        self.back_button.setEnabled(False)  # first page: nothing to go back to

        # Resolve the assets directory both for PyInstaller bundles and
        # for running from source.
        if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
            assets_base_dir = sys._MEIPASS
        else:
            # Go up three levels from this file's directory (src/ui/dialogs) to the project root
            assets_base_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))

        github_icon_path = os.path.join(assets_base_dir, "assets", "github.png")
        instagram_icon_path = os.path.join(assets_base_dir, "assets", "instagram.png")
        discord_icon_path = os.path.join(assets_base_dir, "assets", "discord.png")

        # Icon-only social buttons.
        self.github_button = QPushButton(QIcon(github_icon_path), "")
        self.instagram_button = QPushButton(QIcon(instagram_icon_path), "")
        self.Discord_button = QPushButton(QIcon(discord_icon_path), "")

        icon_size = QSize(24, 24)
        self.github_button.setIconSize(icon_size)
        self.instagram_button.setIconSize(icon_size)
        self.Discord_button.setIconSize(icon_size)

        self.next_button = QPushButton(self._tr("tour_dialog_next_button", "Next"))
        self.next_button.clicked.connect(self._next_step_action)
        self.next_button.setDefault(True)
        self.github_button.clicked.connect(self._open_github_link)
        self.instagram_button.clicked.connect(self._open_instagram_link)
        self.Discord_button.clicked.connect(self._open_Discord_link)
        self.github_button.setToolTip(self._tr("help_guide_github_tooltip", "Visit project's GitHub page (Opens in browser)"))
        self.instagram_button.setToolTip(self._tr("help_guide_instagram_tooltip", "Visit our Instagram page (Opens in browser)"))
        self.Discord_button.setToolTip(self._tr("help_guide_discord_tooltip", "Visit our Discord community (Opens in browser)"))

        social_layout = QHBoxLayout()
        social_layout.setSpacing(10)
        social_layout.addWidget(self.github_button)
        social_layout.addWidget(self.instagram_button)
        social_layout.addWidget(self.Discord_button)

        # NOTE(review): buttons_layout was created empty above, so this
        # clearing loop appears to be a no-op; kept as-is to preserve behavior.
        while buttons_layout.count():
            item = buttons_layout.takeAt(0)
            if item.widget():
                item.widget().setParent(None)
            elif item.layout():
                pass
        buttons_layout.addLayout(social_layout)
        buttons_layout.addStretch(1)
        buttons_layout.addWidget(self.back_button)
        buttons_layout.addWidget(self.next_button)
        main_layout.addLayout(buttons_layout)
        self._update_button_states()

    def _next_step_action(self):
        """Advance to the next page, or close (accept) when on the last page."""
        if self.current_step < len(self.tour_steps_widgets) - 1:
            self.current_step += 1
            self.stacked_widget.setCurrentIndex(self.current_step)
        else:
            self.accept()
        self._update_button_states()

    def _previous_step(self):
        """Go back one page if not already on the first page."""
        if self.current_step > 0:
            self.current_step -= 1
            self.stacked_widget.setCurrentIndex(self.current_step)
        self._update_button_states()

    def _update_button_states(self):
        """Relabel Next/Finish and enable Back according to the current page."""
        if self.current_step == len(self.tour_steps_widgets) - 1:
            self.next_button.setText(self._tr("tour_dialog_finish_button", "Finish"))
        else:
            self.next_button.setText(self._tr("tour_dialog_next_button", "Next"))
        self.back_button.setEnabled(self.current_step > 0)

    def _open_github_link(self):
        """Open the project's GitHub page in the default browser."""
        QDesktopServices.openUrl(QUrl("https://github.com/Yuvi9587"))

    def _open_instagram_link(self):
        """Open the author's Instagram page in the default browser."""
        QDesktopServices.openUrl(QUrl("https://www.instagram.com/uvi.arts/"))

    def _open_Discord_link(self):
        """Open the community Discord invite in the default browser."""
        QDesktopServices.openUrl(QUrl("https://discord.gg/BqP64XTdJN"))
||||
150
src/ui/dialogs/KnownNamesFilterDialog.py
Normal file
150
src/ui/dialogs/KnownNamesFilterDialog.py
Normal file
@ -0,0 +1,150 @@
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import Qt
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QLineEdit, QListWidget,
|
||||
QListWidgetItem, QPushButton, QVBoxLayout
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
|
||||
|
||||
class KnownNamesFilterDialog(QDialog):
    """Dialog for picking entries from Known.txt to add to the character filter.

    Presents a searchable, checkable list of the user's saved names/groups so
    they can be reused as download filters without retyping them.
    """

    def __init__(self, known_names_list, parent_app_ref, parent=None):
        """Builds the dialog.

        Args:
            known_names_list (list): Known-name dicts loaded from Known.txt.
            parent_app_ref (DownloaderApp): Main window (theme and language source).
            parent (QWidget, optional): Qt parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.parent_app = parent_app_ref
        self.setModal(True)
        # Display entries in case-insensitive alphabetical order.
        self.all_known_name_entries = sorted(known_names_list, key=lambda entry: entry['name'].lower())
        self.selected_entries_to_return = []

        # --- Basic window setup ---
        icon = get_app_icon_object()
        if icon and not icon.isNull():
            self.setWindowIcon(icon)

        # Scale the dialog against a 1080p baseline, clamped to [0.75, 1.5].
        screen_geometry = QApplication.primaryScreen().availableGeometry()
        base_width, base_height = 460, 450
        scale = max(0.75, min(screen_geometry.height() / 1080.0, 1.5))
        self.setMinimumSize(int(base_width * scale), int(base_height * scale))
        self.resize(int(base_width * scale * 1.1), int(base_height * scale * 1.1))

        # --- Initialize UI and apply theming ---
        self._init_ui()
        self._retranslate_ui()
        self._apply_theme()

    def _init_ui(self):
        """Creates the search box, the checkable list, and the button row."""
        root = QVBoxLayout(self)

        self.search_input = QLineEdit()
        self.search_input.textChanged.connect(self._filter_list_display)
        root.addWidget(self.search_input)

        self.names_list_widget = QListWidget()
        self._populate_list_widget()
        root.addWidget(self.names_list_widget)

        # --- Control buttons ---
        button_row = QHBoxLayout()

        self.select_all_button = QPushButton()
        self.select_all_button.clicked.connect(self._select_all_items)
        button_row.addWidget(self.select_all_button)

        self.deselect_all_button = QPushButton()
        self.deselect_all_button.clicked.connect(self._deselect_all_items)
        button_row.addWidget(self.deselect_all_button)
        button_row.addStretch(1)

        self.add_button = QPushButton()
        self.add_button.clicked.connect(self._accept_selection_action)
        self.add_button.setDefault(True)
        button_row.addWidget(self.add_button)

        self.cancel_button = QPushButton()
        self.cancel_button.clicked.connect(self.reject)
        button_row.addWidget(self.cancel_button)
        root.addLayout(button_row)

    def _tr(self, key, default_text=""):
        """Translates *key* via the parent app's current language, else the default."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _retranslate_ui(self):
        """Applies translated text to every user-visible element."""
        self.setWindowTitle(self._tr("known_names_filter_dialog_title", "Add Known Names to Filter"))
        self.search_input.setPlaceholderText(self._tr("known_names_filter_search_placeholder", "Search names..."))
        self.select_all_button.setText(self._tr("known_names_filter_select_all_button", "Select All"))
        self.deselect_all_button.setText(self._tr("known_names_filter_deselect_all_button", "Deselect All"))
        self.add_button.setText(self._tr("known_names_filter_add_selected_button", "Add Selected"))
        self.cancel_button.setText(self._tr("fav_posts_cancel_button", "Cancel"))

    def _apply_theme(self):
        """Adopts the parent app's dark stylesheet when the dark theme is active."""
        if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())

    def _populate_list_widget(self):
        """Fills the list with one unchecked, checkable row per known-name entry."""
        self.names_list_widget.clear()
        for entry in self.all_known_name_entries:
            row = QListWidgetItem(entry['name'])
            row.setFlags(row.flags() | Qt.ItemIsUserCheckable)
            row.setCheckState(Qt.Unchecked)
            # Stash the full entry dict so selection can return it verbatim.
            row.setData(Qt.UserRole, entry)
            self.names_list_widget.addItem(row)

    def _filter_list_display(self):
        """Hides rows whose names do not contain the current search text."""
        needle = self.search_input.text().lower()
        for index in range(self.names_list_widget.count()):
            row = self.names_list_widget.item(index)
            entry = row.data(Qt.UserRole)
            row.setHidden(bool(needle) and needle not in entry['name'].lower())

    def _select_all_items(self):
        """Checks every row that is currently visible (i.e. not filtered out)."""
        for index in range(self.names_list_widget.count()):
            row = self.names_list_widget.item(index)
            if not row.isHidden():
                row.setCheckState(Qt.Checked)

    def _deselect_all_items(self):
        """Unchecks every row, visible or hidden."""
        for index in range(self.names_list_widget.count()):
            self.names_list_widget.item(index).setCheckState(Qt.Unchecked)

    def _accept_selection_action(self):
        """Collects all checked entries, then closes the dialog with accept()."""
        self.selected_entries_to_return = [
            self.names_list_widget.item(index).data(Qt.UserRole)
            for index in range(self.names_list_widget.count())
            if self.names_list_widget.item(index).checkState() == Qt.Checked
        ]
        self.accept()

    def get_selected_entries(self):
        """Returns the entries the user confirmed with 'Add Selected'."""
        return self.selected_entries_to_return
|
||||
217
src/ui/dialogs/TourDialog.py
Normal file
217
src/ui/dialogs/TourDialog.py
Normal file
@ -0,0 +1,217 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import sys
|
||||
|
||||
# --- PyQt5 Imports ---
|
||||
from PyQt5.QtCore import pyqtSignal, Qt, QSettings, QCoreApplication
|
||||
from PyQt5.QtWidgets import (
|
||||
QApplication, QDialog, QHBoxLayout, QLabel, QPushButton, QVBoxLayout,
|
||||
QStackedWidget, QScrollArea, QFrame, QWidget, QCheckBox
|
||||
)
|
||||
|
||||
# --- Local Application Imports ---
|
||||
from ...i18n.translator import get_translation
|
||||
from ..main_window import get_app_icon_object
|
||||
from ...config.constants import (
|
||||
CONFIG_ORGANIZATION_NAME
|
||||
)
|
||||
|
||||
|
||||
class TourStepWidget(QWidget):
    """One page of the feature tour: a centered title above scrollable rich text."""

    def __init__(self, title_text, content_text, parent=None):
        """Builds the page.

        Args:
            title_text (str): Heading shown at the top of the page.
            content_text (str): Rich-text body of the tour step.
            parent (QWidget, optional): Qt parent widget. Defaults to None.
        """
        super().__init__(parent)
        page_layout = QVBoxLayout(self)
        page_layout.setContentsMargins(20, 20, 20, 20)
        page_layout.setSpacing(10)

        header = QLabel(title_text)
        header.setAlignment(Qt.AlignCenter)
        header.setStyleSheet("font-size: 18px; font-weight: bold; color: #E0E0E0; padding-bottom: 15px;")
        page_layout.addWidget(header)

        # Body text sits inside a scroll area so long tour content stays readable.
        scroller = QScrollArea()
        scroller.setWidgetResizable(True)
        scroller.setFrameShape(QFrame.NoFrame)
        scroller.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
        scroller.setVerticalScrollBarPolicy(Qt.ScrollBarAsNeeded)
        scroller.setStyleSheet("background-color: transparent;")

        body = QLabel(content_text)
        body.setWordWrap(True)
        body.setAlignment(Qt.AlignLeft | Qt.AlignTop)
        body.setTextFormat(Qt.RichText)
        body.setOpenExternalLinks(True)
        body.setStyleSheet("font-size: 11pt; color: #C8C8C8; line-height: 1.8;")
        scroller.setWidget(body)
        page_layout.addWidget(scroller, 1)
|
||||
|
||||
|
||||
class TourDialog(QDialog):
    """Multi-page feature tour shown to the user on first launch.

    Offers a "Never show this tour again" checkbox whose state is persisted
    via QSettings so the tour can be suppressed on later startups.
    """

    # Emitted when the user reaches the last page and presses Finish.
    tour_finished_normally = pyqtSignal()
    # Emitted when the user skips the tour or closes the window early.
    tour_skipped = pyqtSignal()

    # QSettings scope/key used to remember the opt-out preference.
    CONFIG_APP_NAME_TOUR = "ApplicationTour"
    TOUR_SHOWN_KEY = "neverShowTourAgainV19"

    def __init__(self, parent_app, parent=None):
        """Builds the tour dialog.

        Args:
            parent_app (DownloaderApp): Main window (theme and language source).
            parent (QWidget, optional): Qt parent widget. Defaults to None.
        """
        super().__init__(parent)
        self.settings = QSettings(CONFIG_ORGANIZATION_NAME, self.CONFIG_APP_NAME_TOUR)
        self.current_step = 0
        self.parent_app = parent_app

        self.setWindowIcon(get_app_icon_object())
        self.setModal(True)
        self.setFixedSize(600, 620)

        self._init_ui()
        self._apply_theme()
        self._center_on_screen()

    def _tr(self, key, default_text=""):
        """Translates *key* via the parent app's current language, else the default."""
        if callable(get_translation) and self.parent_app:
            return get_translation(self.parent_app.current_selected_language, key, default_text)
        return default_text

    def _init_ui(self):
        """Builds the stacked tour pages and the bottom control row."""
        outer = QVBoxLayout(self)
        outer.setContentsMargins(0, 0, 0, 0)
        outer.setSpacing(0)

        self.stacked_widget = QStackedWidget()
        outer.addWidget(self.stacked_widget, 1)

        # Translation keys for each of the eight pages, in display order.
        step_keys = [
            (f"tour_dialog_step{n}_title", f"tour_dialog_step{n}_content")
            for n in range(1, 9)
        ]

        self.tour_steps_widgets = []
        for title_key, content_key in step_keys:
            page = TourStepWidget(
                self._tr(title_key, title_key),
                self._tr(content_key, "Content not found."),
            )
            self.tour_steps_widgets.append(page)
            self.stacked_widget.addWidget(page)

        self.setWindowTitle(self._tr("tour_dialog_title", "Welcome to Kemono Downloader!"))

        # --- Bottom controls: opt-out checkbox above the navigation buttons ---
        controls = QVBoxLayout()
        controls.setContentsMargins(15, 10, 15, 15)
        controls.setSpacing(12)

        self.never_show_again_checkbox = QCheckBox(self._tr("tour_dialog_never_show_checkbox", "Never show this tour again"))
        controls.addWidget(self.never_show_again_checkbox, 0, Qt.AlignLeft)

        nav_row = QHBoxLayout()
        nav_row.setSpacing(10)
        self.skip_button = QPushButton(self._tr("tour_dialog_skip_button", "Skip Tour"))
        self.skip_button.clicked.connect(self._skip_tour_action)
        self.back_button = QPushButton(self._tr("tour_dialog_back_button", "Back"))
        self.back_button.clicked.connect(self._previous_step)
        self.next_button = QPushButton(self._tr("tour_dialog_next_button", "Next"))
        self.next_button.clicked.connect(self._next_step_action)
        self.next_button.setDefault(True)

        nav_row.addWidget(self.skip_button)
        nav_row.addStretch(1)
        nav_row.addWidget(self.back_button)
        nav_row.addWidget(self.next_button)

        controls.addLayout(nav_row)
        outer.addLayout(controls)

        self._update_button_states()

    def _apply_theme(self):
        """Applies the parent app's dark theme, or a plain light fallback."""
        if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
            self.setStyleSheet(self.parent_app.get_dark_theme())
        else:
            self.setStyleSheet("QDialog { background-color: #f0f0f0; }")

    def _center_on_screen(self):
        """Moves the dialog to the center of the primary screen."""
        try:
            available = QApplication.primaryScreen().availableGeometry()
            self.move(available.center() - self.rect().center())
        except Exception as e:
            print(f"[TourDialog] Error centering dialog: {e}")

    def _next_step_action(self):
        """Advances one page, or finishes the tour when on the last page."""
        if self.current_step < len(self.tour_steps_widgets) - 1:
            self.current_step += 1
            self.stacked_widget.setCurrentIndex(self.current_step)
        else:
            self._finish_tour_action()
        self._update_button_states()

    def _previous_step(self):
        """Steps back one page if not already on the first."""
        if self.current_step > 0:
            self.current_step -= 1
            self.stacked_widget.setCurrentIndex(self.current_step)
        self._update_button_states()

    def _update_button_states(self):
        """Refreshes the Next/Finish label and Back enablement for the current page."""
        on_last_page = self.current_step == len(self.tour_steps_widgets) - 1
        if on_last_page:
            self.next_button.setText(self._tr("tour_dialog_finish_button", "Finish"))
        else:
            self.next_button.setText(self._tr("tour_dialog_next_button", "Next"))
        self.back_button.setEnabled(self.current_step > 0)

    def _skip_tour_action(self):
        """Persists the checkbox, emits tour_skipped, and rejects the dialog."""
        self._save_settings_if_checked()
        self.tour_skipped.emit()
        self.reject()

    def _finish_tour_action(self):
        """Persists the checkbox, emits tour_finished_normally, and accepts."""
        self._save_settings_if_checked()
        self.tour_finished_normally.emit()
        self.accept()

    def _save_settings_if_checked(self):
        """Writes the 'never show again' checkbox state to QSettings."""
        self.settings.setValue(self.TOUR_SHOWN_KEY, self.never_show_again_checkbox.isChecked())
        self.settings.sync()

    @staticmethod
    def should_show_tour():
        """Returns True unless the user previously opted out via QSettings."""
        settings = QSettings(TourDialog.CONFIG_ORGANIZATION_NAME, TourDialog.CONFIG_APP_NAME_TOUR)
        return not settings.value(TourDialog.TOUR_SHOWN_KEY, False, type=bool)

    # Re-exported as a class attribute so should_show_tour() can reference it
    # through the class object. Class-body assignments run at class creation,
    # so this is in place before the staticmethod is ever called.
    CONFIG_ORGANIZATION_NAME = CONFIG_ORGANIZATION_NAME

    def closeEvent(self, event):
        """Treats closing via the window 'X' as skipping the tour."""
        self._skip_tour_action()
        super().closeEvent(event)
|
||||
1
src/ui/dialogs/__init__.py
Normal file
1
src/ui/dialogs/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
5372
src/ui/main_window.py
Normal file
5372
src/ui/main_window.py
Normal file
File diff suppressed because it is too large
Load Diff
1
src/utils/__init__.py
Normal file
1
src/utils/__init__.py
Normal file
@ -0,0 +1 @@
|
||||
# ...existing code...
|
||||
142
src/utils/file_utils.py
Normal file
142
src/utils/file_utils.py
Normal file
@ -0,0 +1,142 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import re
|
||||
|
||||
# --- Module Constants ---

# This will be populated at runtime by the main application,
# but is defined here as it's conceptually related to file/folder naming.
KNOWN_NAMES = []

# Upper bound for the length of any single path component we generate.
MAX_FILENAME_COMPONENT_LENGTH = 150

# Sets of file extensions for quick type checking (always compared lowercase).
IMAGE_EXTENSIONS = {
    '.jpg', '.jpeg', '.png', '.gif', '.bmp', '.tiff', '.tif', '.webp',
    '.heic', '.heif', '.svg', '.ico', '.jfif', '.pjpeg', '.pjp', '.avif'
}
VIDEO_EXTENSIONS = {
    '.mp4', '.mov', '.mkv', '.webm', '.avi', '.wmv', '.flv', '.mpeg',
    '.mpg', '.m4v', '.3gp', '.ogv', '.ts', '.vob'
}
ARCHIVE_EXTENSIONS = {
    '.zip', '.rar', '.7z', '.tar', '.gz', '.bz2'
}
AUDIO_EXTENSIONS = {
    '.mp3', '.wav', '.aac', '.flac', '.ogg', '.wma', '.m4a', '.opus',
    '.aiff', '.ape', '.mid', '.midi'
}

# Words to ignore when trying to generate a folder name from a title
# (lowercase; compared against lowercased title tokens).
FOLDER_NAME_STOP_WORDS = {
    "a", "alone", "am", "an", "and", "at", "be", "blues", "but", "by", "com",
    "for", "grown", "hard", "he", "her", "his", "hitting", "i", "im", "in", "is", "it", "its",
    "me", "much", "my", "net", "not", "of", "on", "or", "org", "our", "please",
    "right", "s", "she", "so", "technically", "tell", "the", "their", "they", "this",
    "to", "ve", "was", "we", "well", "were", "with", "www", "year", "you", "your",
}
|
||||
|
||||
# --- File and Folder Name Utilities ---
|
||||
|
||||
def clean_folder_name(name):
    """
    Sanitizes *name* into a string safe to use as a folder name.

    Drops characters invalid on common filesystems, collapses whitespace,
    truncates to MAX_FILENAME_COMPONENT_LENGTH, and falls back to a default
    when nothing usable remains.

    Args:
        name (str): Candidate folder name (coerced to str if needed).

    Returns:
        str: A sanitized, non-empty folder name.
    """
    text = name if isinstance(name, str) else str(name)

    # Remove characters that are invalid in folder names on most OSes.
    text = re.sub(r'[<>:"/\\|?*]', '', text).strip()
    # Collapse runs of whitespace into single spaces.
    text = re.sub(r'\s+', ' ', text)

    if not text:
        return "untitled_folder"

    # Truncate to a reasonable length (no-op when already short enough).
    text = text[:MAX_FILENAME_COMPONENT_LENGTH]
    # Trailing dots/spaces are problematic (illegal on Windows).
    text = text.rstrip('. ')
    return text if text else "untitled_folder"
|
||||
|
||||
|
||||
def clean_filename(name):
    """
    Sanitizes *name* into a safe file name.

    Invalid filesystem characters become underscores, and the base name is
    truncated so the full name fits MAX_FILENAME_COMPONENT_LENGTH.

    Args:
        name (str): Candidate file name (coerced to str if needed).

    Returns:
        str: A sanitized, non-empty file name.
    """
    text = name if isinstance(name, str) else str(name)

    # Replace (rather than drop) invalid characters to preserve name structure.
    text = re.sub(r'[<>:"/\\|?*]', '_', text).strip()
    if not text:
        return "untitled_file"

    stem, extension = os.path.splitext(text)
    allowed_stem_len = MAX_FILENAME_COMPONENT_LENGTH - len(extension)

    if len(stem) <= allowed_stem_len:
        return stem + extension
    if allowed_stem_len > 0:
        return stem[:allowed_stem_len] + extension
    # The extension alone exceeds the limit: hard-truncate the whole name.
    return text[:MAX_FILENAME_COMPONENT_LENGTH]
|
||||
|
||||
|
||||
# --- File Type Identification Functions ---
|
||||
|
||||
def is_image(filename):
    """Returns True if *filename* ends with a known image extension (case-insensitive)."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in IMAGE_EXTENSIONS
|
||||
|
||||
def is_video(filename):
    """Returns True if *filename* ends with a known video extension (case-insensitive)."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in VIDEO_EXTENSIONS
|
||||
|
||||
def is_zip(filename):
    """Returns True if *filename* is a .zip file (case-insensitive)."""
    return bool(filename) and filename.lower().endswith('.zip')
|
||||
|
||||
def is_rar(filename):
    """Returns True if *filename* is a .rar file (case-insensitive)."""
    return bool(filename) and filename.lower().endswith('.rar')
|
||||
|
||||
def is_archive(filename):
    """Returns True if *filename* ends with a known archive extension (case-insensitive)."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in ARCHIVE_EXTENSIONS
|
||||
|
||||
def is_audio(filename):
    """Returns True if *filename* ends with a known audio extension (case-insensitive)."""
    if not filename:
        return False
    return os.path.splitext(filename)[1].lower() in AUDIO_EXTENSIONS
|
||||
208
src/utils/network_utils.py
Normal file
208
src/utils/network_utils.py
Normal file
@ -0,0 +1,208 @@
|
||||
# --- Standard Library Imports ---
|
||||
import os
|
||||
import re
|
||||
from urllib.parse import urlparse
|
||||
|
||||
# --- Third-Party Library Imports ---
|
||||
# This module might not require third-party libraries directly,
|
||||
# but 'requests' is a common dependency for network operations.
|
||||
# import requests
|
||||
|
||||
|
||||
def parse_cookie_string(cookie_string):
    """
    Parses a 'name=value; name2=value2' cookie header string.

    Args:
        cookie_string (str): Raw cookie string (e.g. copied from browser devtools).

    Returns:
        dict or None: Mapping of cookie names to values, or None when nothing
        parseable was found.
    """
    if not cookie_string:
        return None

    parsed = {}
    for chunk in cookie_string.split(';'):
        # Split on the first '=' only; values may themselves contain '='.
        name, sep, value = chunk.partition('=')
        if sep and name.strip():
            parsed[name.strip()] = value.strip()
    return parsed or None
|
||||
|
||||
|
||||
def load_cookies_from_netscape_file(filepath, logger_func, target_domain_filter=None):
    """
    Loads cookies from a Netscape-formatted cookies.txt file.

    If target_domain_filter is provided, only cookies whose domain field
    matches that host are returned. Per the Netscape cookie-file convention,
    a domain field with a leading dot (e.g. ".example.com") matches the bare
    domain itself and any of its subdomains.

    Args:
        filepath (str): The full path to the cookies.txt file.
        logger_func (callable): Function to use for logging.
        target_domain_filter (str, optional): The domain to filter cookies for.

    Returns:
        dict or None: A dictionary of cookie names and values, or None if none are loaded.
    """
    cookies = {}
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            for line in f:
                line = line.strip()
                # Skip blank lines and comments.
                if not line or line.startswith('#'):
                    continue

                # Netscape format: domain, flag, path, secure, expiry, name, value.
                parts = line.split('\t')
                if len(parts) != 7:
                    continue

                cookie_domain, name, value = parts[0], parts[5], parts[6]
                if not name:
                    continue

                if target_domain_filter:
                    host = target_domain_filter.lower()
                    domain = cookie_domain.lower()
                    if domain.startswith('.'):
                        # ".example.com" matches "example.com" and any subdomain.
                        # (Bug fix: the bare domain previously failed, because
                        # "example.com".endswith(".example.com") is False.)
                        if host != domain[1:] and not host.endswith(domain):
                            continue
                    elif host != domain:
                        continue

                cookies[name] = value

        logger_func(f"   🍪 Loaded {len(cookies)} cookies from '{os.path.basename(filepath)}' for domain '{target_domain_filter or 'any'}'.")
        return cookies if cookies else None
    except FileNotFoundError:
        logger_func(f"   🍪 Cookie file '{os.path.basename(filepath)}' not found.")
        return None
    except Exception as e:
        logger_func(f"   🍪 Error parsing cookie file '{os.path.basename(filepath)}': {e}")
        return None
|
||||
|
||||
|
||||
def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_cookie_file_path, app_base_dir, logger_func, target_domain=None):
    """
    Resolves the cookie dict to use for a request, honoring source priority.

    Priority order:
        1. A cookie file explicitly selected in the UI.
        2. "data/<domain>_cookies.txt" under the app directory.
        3. The default "appdata/cookies.txt" under the app directory.
        4. Cookie text typed into the UI.

    Args:
        use_cookie_flag (bool): Whether cookies are enabled in the UI.
        cookie_text_input (str): Raw text from the cookie input field.
        selected_cookie_file_path (str): Path to a user-browsed cookie file.
        app_base_dir (str): Base directory of the application.
        logger_func (callable): Function for logging.
        target_domain (str, optional): Domain for which cookies are needed.

    Returns:
        dict or None: Cookies for the request, or None.
    """
    if not use_cookie_flag:
        return None

    # Build the candidate file list in priority order; missing files are skipped.
    candidate_files = []
    if selected_cookie_file_path:
        candidate_files.append(selected_cookie_file_path)
    if app_base_dir and target_domain:
        candidate_files.append(os.path.join(app_base_dir, "data", f"{target_domain}_cookies.txt"))
    if app_base_dir:
        candidate_files.append(os.path.join(app_base_dir, "appdata", "cookies.txt"))

    for candidate in candidate_files:
        if os.path.exists(candidate):
            loaded = load_cookies_from_netscape_file(candidate, logger_func, target_domain)
            if loaded:
                return loaded

    # Last resort: manually entered cookie text.
    if cookie_text_input:
        parsed = parse_cookie_string(cookie_text_input)
        if parsed:
            return parsed

    logger_func(f"   🍪 Cookie usage enabled for '{target_domain or 'any'}', but no valid cookies found.")
    return None
|
||||
|
||||
|
||||
def extract_post_info(url_string):
    """
    Extracts (service, user_id, post_id) from a Kemono/Coomer-style URL.

    Handles both the website form /<service>/user/<id>[/post/<id>] and the
    API form /api/v1/<service>/user/<id>[/post/<id>].

    Args:
        url_string (str): The URL to parse.

    Returns:
        tuple: (service, user_id, post_id); each element may be None.
    """
    if not isinstance(url_string, str) or not url_string.strip():
        return None, None, None

    try:
        segments = [s for s in urlparse(url_string.strip()).path.strip('/').split('/') if s]

        # Website form: /<service>/user/<user_id>[/post/<post_id>]
        if len(segments) >= 3 and segments[1].lower() == 'user':
            post = segments[4] if len(segments) >= 5 and segments[3].lower() == 'post' else None
            return segments[0], segments[2], post

        # API form: /api/v1/<service>/user/<user_id>[/post/<post_id>]
        if len(segments) >= 5 and segments[:2] == ['api', 'v1'] and segments[3].lower() == 'user':
            post = segments[6] if len(segments) >= 7 and segments[5].lower() == 'post' else None
            return segments[2], segments[4], post
    except Exception as e:
        print(f"Debug: Exception during URL parsing for '{url_string}': {e}")

    return None, None, None
|
||||
|
||||
|
||||
def get_link_platform(url):
    """
    Identifies the hosting platform of a URL from its domain (and, for
    Discord invites, its path).

    Args:
        url (str): The URL to identify.

    Returns:
        str: Platform name (e.g. 'mega', 'google drive'), a generic
             second-level-domain fallback, 'external', or 'unknown' on error.
    """
    try:
        parsed = urlparse(url)
        domain = parsed.netloc.lower()
        if 'drive.google.com' in domain: return 'google drive'
        if 'mega.nz' in domain or 'mega.io' in domain: return 'mega'
        if 'dropbox.com' in domain: return 'dropbox'
        if 'patreon.com' in domain: return 'patreon'
        if 'gofile.io' in domain: return 'gofile'
        if 'instagram.com' in domain: return 'instagram'
        # NOTE(review): substring check means e.g. "max.com" also matches
        # 'x.com' — preserved from the original; confirm intended behavior.
        if 'twitter.com' in domain or 'x.com' in domain: return 'twitter/x'
        # Bug fix: the invite check must inspect the URL path; the old test
        # "'discord.com/invite' in domain" could never match a netloc.
        if 'discord.gg' in domain or ('discord.com' in domain and parsed.path.lower().startswith('/invite')):
            return 'discord invite'
        if 'pixiv.net' in domain: return 'pixiv'
        if 'kemono.su' in domain or 'kemono.party' in domain: return 'kemono'
        if 'coomer.su' in domain or 'coomer.party' in domain: return 'coomer'

        # Fallback: use the second-level domain as a generic platform name.
        parts = domain.split('.')
        if len(parts) >= 2:
            return parts[-2]
        return 'external'
    except Exception:
        return 'unknown'
|
||||
207
src/utils/text_utils.py
Normal file
207
src/utils/text_utils.py
Normal file
@ -0,0 +1,207 @@
|
||||
# --- Standard Library Imports ---
|
||||
import re
|
||||
import html
|
||||
|
||||
# --- Local Application Imports ---
|
||||
# Import from file_utils within the same package
|
||||
from .file_utils import clean_folder_name, FOLDER_NAME_STOP_WORDS
|
||||
|
||||
# --- Module Constants ---

# Regular expression patterns for cleaning up titles before matching against Known.txt.
# These are tag-like qualifiers ([OC], "HD", resolutions, ...), not character
# names, and are stripped case-insensitively before alias matching.
KNOWN_TXT_MATCH_CLEANUP_PATTERNS = [
    r'\bcum\b',
    r'\bnsfw\b',
    r'\bsfw\b',
    r'\bweb\b',
    r'\bhd\b',
    r'\bhi\s*res\b',
    r'\bhigh\s*res\b',
    r'\b\d+p\b',
    r'\b\d+k\b',
    r'\[OC\]',
    r'\[Request(?:s)?\]',
    r'\bCommission\b',
    r'\bComm\b',
    r'\bPreview\b',
]
|
||||
|
||||
# --- Text Matching and Manipulation Utilities ---
|
||||
|
||||
def is_title_match_for_character(post_title, character_name_filter):
    """
    Checks whether *post_title* contains *character_name_filter* as a whole
    word (case-insensitive).

    Args:
        post_title (str): The title of the post.
        character_name_filter (str): The character name to search for.

    Returns:
        bool: True if the name occurs as a whole word, False otherwise.
    """
    if not post_title or not character_name_filter:
        return False

    # Strip first and re-check: a whitespace-only filter previously produced
    # the degenerate pattern \b\b, which matches almost any title.
    needle = str(character_name_filter).strip()
    if not needle:
        return False

    # Word boundaries (\b) ensure whole-word matching only.
    pattern = r"(?i)\b" + re.escape(needle) + r"\b"
    return bool(re.search(pattern, post_title))
|
||||
|
||||
|
||||
def is_filename_match_for_character(filename, character_name_filter):
    """
    Case-insensitive substring test: does *filename* contain the character name?

    Args:
        filename (str): The name of the file.
        character_name_filter (str): The character name to search for.

    Returns:
        bool: True if the substring is found, False otherwise.
    """
    if not filename or not character_name_filter:
        return False

    needle = str(character_name_filter).strip().lower()
    return needle in filename.lower()
|
||||
|
||||
|
||||
def strip_html_tags(html_text):
    """
    Strips HTML tags from *html_text* and normalizes the whitespace left behind.

    Args:
        html_text (str): Input string that may contain HTML markup/entities.

    Returns:
        str: Plain text with entities unescaped and tags removed.
    """
    if not html_text:
        return ""

    # Unescape entities first (&amp; -> &) so text survives intact.
    unescaped = html.unescape(str(html_text))
    # Replace each tag with a space, then collapse whitespace runs.
    without_tags = re.sub(r'<[^>]+>', ' ', unescaped)
    return re.sub(r'\s+', ' ', without_tags).strip()
|
||||
|
||||
|
||||
def extract_folder_name_from_title(title, unwanted_keywords):
    """
    Derives a folder name from a post title: the first significant word that
    is not in *unwanted_keywords*, otherwise the cleaned full title.

    Args:
        title (str): The post title.
        unwanted_keywords (set): Lowercase words to skip.

    Returns:
        str: The chosen folder name, or 'Uncategorized'.
    """
    if not title:
        return 'Uncategorized'

    # Scan whole words (including hyphenated ones) left to right.
    for token in re.findall(r'\b[\w\-]+\b', title.lower()):
        candidate = clean_folder_name(token)
        if candidate and candidate.lower() not in unwanted_keywords:
            return candidate

    # No single significant word: fall back to the cleaned full title.
    fallback = clean_folder_name(title)
    return fallback if fallback else 'Uncategorized'
|
||||
|
||||
|
||||
def match_folders_from_title(title, names_to_match, unwanted_keywords):
    """
    Match known folder names against a post title.

    Each entry of *names_to_match* is a dict of the form
    {'name': 'PrimaryName', 'aliases': ['alias1', ...]}.

    Args:
        title (str): The post title to check.
        names_to_match (list): Known name dictionaries.
        unwanted_keywords (set): Folder names to ignore.

    Returns:
        list: Sorted list of matched primary folder names.
    """
    if not title or not names_to_match:
        return []

    # Scrub common bracketed tags ([OC], [HD], ...) before matching.
    scrubbed = title
    for pattern in KNOWN_TXT_MATCH_CLEANUP_PATTERNS:
        scrubbed = re.sub(pattern, ' ', scrubbed, flags=re.IGNORECASE)
    haystack = re.sub(r'\s+', ' ', scrubbed).strip().lower()

    results = set()

    # Longer primary names first so e.g. "Cloud Strife" wins over "Cloud".
    by_length = sorted(
        names_to_match,
        key=lambda obj: len(obj.get("name", "")),
        reverse=True,
    )

    for entry in by_length:
        primary = entry.get("name")
        alias_list = entry.get("aliases", [])
        if not primary or not alias_list:
            continue

        for raw_alias in alias_list:
            needle = raw_alias.lower()
            if not needle:
                continue

            # Whole-word match only, to avoid substring false positives.
            if re.search(r'\b' + re.escape(needle) + r'\b', haystack):
                cleaned = clean_folder_name(primary)
                if cleaned.lower() not in unwanted_keywords:
                    results.add(cleaned)
                break  # One hit per name object is enough.

    return sorted(results)
|
||||
|
||||
|
||||
def match_folders_from_filename_enhanced(filename, names_to_match, unwanted_keywords):
    """
    Match known folder names against a filename prefix.

    Longer aliases are considered first so the most specific alias wins
    when several share a prefix.

    Args:
        filename (str): The filename to check.
        names_to_match (list): Known name dictionaries
            ({'name': ..., 'aliases': [...]}).
        unwanted_keywords (set): Folder names to ignore.

    Returns:
        list: Sorted list of matched primary folder names.
    """
    if not filename or not names_to_match:
        return []

    target = filename.lower()
    hits = set()

    # Flatten to (alias_lower, cleaned_primary) pairs so aliases can be
    # ordered by length regardless of which name object they came from.
    pairs = []
    for entry in names_to_match:
        raw_primary = entry.get("name")
        if not raw_primary:
            continue

        cleaned = clean_folder_name(raw_primary)
        if not cleaned or cleaned.lower() in unwanted_keywords:
            continue

        for raw_alias in entry.get("aliases", []):
            lowered = raw_alias.lower()
            if lowered:
                pairs.append((lowered, cleaned))

    # Most specific (longest) aliases first.
    pairs.sort(key=lambda pair: len(pair[0]), reverse=True)

    for lowered, cleaned in pairs:
        if target.startswith(lowered):
            hits.add(cleaned)

    return sorted(hits)
|
||||
Loading…
x
Reference in New Issue
Block a user