Compare commits


No commits in common. "611e8925764c6201f94a3a9abfb330ce0c12cbd9" and "b5b6c1bc468af631ab821867096cc3d15c2617f4" have entirely different histories.

4 changed files with 121 additions and 415 deletions

View File

@@ -6,9 +6,7 @@ import requests
 import cloudscraper
 from ..utils.network_utils import extract_post_info, prepare_cookies_for_request
 from ..config.constants import (
-    STYLE_DATE_POST_TITLE,
-    STYLE_DATE_BASED,
-    STYLE_POST_TITLE_GLOBAL_NUMBERING
+    STYLE_DATE_POST_TITLE
 )
@@ -83,6 +81,7 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
 def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logger, cookies_dict=None):
     """
+    --- MODIFIED FUNCTION ---
     Fetches the full data, including the 'content' field, for a single post using cloudscraper.
     """
     post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{post_id}"
@@ -107,6 +106,7 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
         logger(f"   ❌ Failed to fetch full content for post {post_id}: {e}")
         return None
     finally:
+        # CRITICAL FIX: Close the scraper session to free file descriptors and memory
         if scraper:
             scraper.close()
@@ -120,6 +120,7 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
     logger(f"   Fetching comments: {comments_api_url}")
     try:
+        # FIX: Use context manager
         with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict) as response:
             response.raise_for_status()
             response.encoding = 'utf-8'
@@ -179,6 +180,7 @@ def download_from_api(
     direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
     logger(f"   Attempting direct fetch for target post: {direct_post_api_url}")
     try:
+        # FIX: Use context manager
         with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api) as direct_response:
             direct_response.raise_for_status()
             direct_response.encoding = 'utf-8'
@@ -206,23 +208,12 @@ def download_from_api(
     if target_post_id and (start_page or end_page):
         logger("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")
-    # --- FIXED LOGIC HERE ---
-    # Define which styles require fetching ALL posts first (Sequential Mode)
-    styles_requiring_fetch_all = [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]
-    # Only enable "fetch all and sort" if the current style is explicitly in the list above
-    is_manga_mode_fetch_all_and_sort_oldest_first = (
-        manga_mode and
-        (manga_filename_style_for_sort_check in styles_requiring_fetch_all) and
-        not target_post_id
-    )
+    is_manga_mode_fetch_all_and_sort_oldest_first = manga_mode and (manga_filename_style_for_sort_check != STYLE_DATE_POST_TITLE) and not target_post_id
     should_fetch_all = fetch_all_first or is_manga_mode_fetch_all_and_sort_oldest_first
     api_base_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/posts"
     page_size = 50
     if is_manga_mode_fetch_all_and_sort_oldest_first:
-        logger(f"   Manga Mode (Style: {manga_filename_style_for_sort_check} - Oldest First Sort Active): Fetching all posts to sort by date...")
+        logger(f"   Manga Mode (Style: {manga_filename_style_for_sort_check if manga_filename_style_for_sort_check else 'Default'} - Oldest First Sort Active): Fetching all posts to sort by date...")
         all_posts_for_manga_mode = []
         current_offset_manga = 0
         if start_page and start_page > 1:
@@ -317,9 +308,8 @@ def download_from_api(
             yield all_posts_for_manga_mode[i:i + page_size]
         return
-    # Log specific message for styles that are in Manga Mode but NOT sorting (Streaming)
-    if manga_mode and not target_post_id and (manga_filename_style_for_sort_check not in styles_requiring_fetch_all):
-        logger(f"   Renaming Mode (Style: {manga_filename_style_for_sort_check}): Processing posts in default API order (Streaming).")
+    if manga_mode and not target_post_id and (manga_filename_style_for_sort_check == STYLE_DATE_POST_TITLE):
+        logger(f"   Manga Mode (Style: {STYLE_DATE_POST_TITLE}): Processing posts in default API order (newest first).")
     current_page_num = 1
     current_offset = 0

View File

@@ -84,18 +84,8 @@ class DownloadManager:
         is_single_post = bool(config.get('target_post_id_from_initial_url'))
         use_multithreading = config.get('use_multithreading', True)
-        # --- FIXED LOGIC: Strict check for sequential fetch modes ---
-        # Only "Date Based" and "Title + Global Numbering" require fetching the full list first.
-        # "Custom", "Date + Title", "Original Name", and "Post ID" will now use the pool (streaming).
-        sequential_styles = [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]
-        is_manga_sequential = (
-            config.get('manga_mode_active') and
-            config.get('manga_filename_style') in sequential_styles
-        )
-        # If it is NOT a strictly sequential manga mode, we use the pool (fetch-as-we-go)
+        is_manga_sequential = config.get('manga_mode_active') and config.get('manga_filename_style') in [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]
         should_use_multithreading_for_posts = use_multithreading and not is_single_post and not is_manga_sequential

         if should_use_multithreading_for_posts:
@@ -107,12 +97,13 @@ class DownloadManager:
             fetcher_thread.start()
         else:
             # Single-threaded mode does not use the manager's complex logic
-            self._log("   Manager is handing off to a single-threaded worker (Sequential Mode)...")
+            self._log("   Manager is handing off to a single-threaded worker...")
             # The single-threaded worker will manage its own lifecycle and signals.
             # The manager's role for this session is effectively over.
             self.is_running = False  # Allow another session to start if needed
             self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)})

     def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data):
         """
         Fetches posts from the API in batches and submits them as tasks to a thread pool.
@@ -141,110 +132,127 @@ class DownloadManager:
                     return
                 for post_data in posts_to_process:
-                    if self.cancellation_event.is_set():
-                        break
+                    if self.cancellation_event.is_set(): break
                     worker = PostProcessorWorker(post_data, config, self.progress_queue)
                     future = self.thread_pool.submit(worker.process)
                     future.add_done_callback(self._handle_future_result)
                     self.active_futures.append(future)
             else:
-                # --- Streaming Logic ---
+                # --- START: REFACTORED STREAMING LOGIC ---
                 post_generator = download_from_api(
                     api_url_input=config['api_url'],
                     logger=self._log,
                     start_page=config.get('start_page'),
                     end_page=config.get('end_page'),
-                    manga_mode=config.get('manga_mode_active', False),
                     cancellation_event=self.cancellation_event,
                     pause_event=self.pause_event,
-                    cookies_dict=None  # Cookie handling handled inside client if needed, or update if passed
+                    use_cookie=config.get('use_cookie', False),
+                    cookie_text=config.get('cookie_text', ''),
+                    selected_cookie_file=config.get('selected_cookie_file'),
+                    app_base_dir=config.get('app_base_dir'),
+                    manga_filename_style_for_sort_check=config.get('manga_filename_style'),
+                    processed_post_ids=list(processed_ids)
                 )
-                for post_batch in post_generator:
+                self.total_posts = 0
+                self.processed_posts = 0
+                # Process posts in batches as they are yielded by the API client
+                for batch in post_generator:
                     if self.cancellation_event.is_set():
+                        self._log("   Post fetching cancelled.")
                         break
-                    if not post_batch:
-                        continue
-                    new_posts_batch = [p for p in post_batch if p.get('id') not in processed_ids]
-                    if not new_posts_batch:
-                        # Log skipped count for UI feedback if needed, already handled in api_client usually
+                    # Filter out any posts that might have been processed since the start
+                    posts_in_batch_to_process = [p for p in batch if p.get('id') not in processed_ids]
+                    if not posts_in_batch_to_process:
                         continue
-                    # Update total posts dynamically as we find them
-                    self.total_posts += len(new_posts_batch)
-                    # Note: total_posts in streaming is a "running total of found posts", not absolute total
-                    for post_data in new_posts_batch:
-                        if self.cancellation_event.is_set():
-                            break
-                        # Pass explicit args or config to worker
-                        # Ideally PostProcessorWorker should accept the whole config dict or mapped args
-                        # For now assuming PostProcessorWorker takes (post_data, config_dict, queue)
-                        # OR we map the config to the args expected by PostProcessorWorker.__init__
-                        # MAPPING CONFIG TO WORKER ARGS (Safe wrapper)
-                        worker_args = self._map_config_to_worker_args(post_data, config)
-                        worker = PostProcessorWorker(**worker_args)
+                    # Update total count and immediately inform the UI
+                    self.total_posts += len(posts_in_batch_to_process)
+                    self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
+                    for post_data in posts_in_batch_to_process:
+                        if self.cancellation_event.is_set(): break
+                        worker = PostProcessorWorker(post_data, config, self.progress_queue)
                         future = self.thread_pool.submit(worker.process)
                         future.add_done_callback(self._handle_future_result)
                         self.active_futures.append(future)
-                    # Small sleep to prevent UI freeze if batches are huge and instant
-                    time.sleep(0.01)
+                if self.total_posts == 0 and not self.cancellation_event.is_set():
+                    self._log("✅ No new posts found to process.")
         except Exception as e:
-            self._log(f"❌ Critical Error in Fetcher Thread: {e}")
-            traceback.print_exc()
+            self._log(f"❌ CRITICAL ERROR in post fetcher thread: {e}")
+            self._log(traceback.format_exc())
         finally:
-            self.is_running = False  # Mark as not running so we can finish
-            # The main window checks active futures, so we just exit this thread.
+            if self.thread_pool:
+                self.thread_pool.shutdown(wait=True)
+            self.is_running = False
+            self._log("🏁 All processing tasks have completed or been cancelled.")
+            self.progress_queue.put({
+                'type': 'finished',
+                'payload': (self.total_downloads, self.total_skips, self.cancellation_event.is_set(), self.all_kept_original_filenames)
+            })
-    def _map_config_to_worker_args(self, post_data, config):
-        """Helper to map the flat config dict to PostProcessorWorker arguments."""
-        # This mirrors the arguments in workers.py PostProcessorWorker.__init__
-        return {
-            'post_data': post_data,
-            'download_root': config.get('output_dir'),
-            'known_names': [],  # If needed, pass KNOWN_NAMES or load them
-            'filter_character_list': [],  # Parsed filters if available in config
-            'emitter': self.progress_queue,
-            'unwanted_keywords': set(),  # Parse if needed
-            'filter_mode': config.get('filter_mode'),
-            'skip_zip': config.get('skip_zip'),
-            'use_subfolders': config.get('use_subfolders'),
-            'use_post_subfolders': config.get('use_post_subfolders'),
-            'target_post_id_from_initial_url': config.get('target_post_id_from_initial_url'),
-            'custom_folder_name': config.get('custom_folder_name'),
-            'compress_images': config.get('compress_images'),
-            'download_thumbnails': config.get('download_thumbnails'),
-            'service': config.get('service') or 'unknown',  # extracted elsewhere
-            'user_id': config.get('user_id') or 'unknown',
-            'pause_event': self.pause_event,
-            'api_url_input': config.get('api_url'),
-            'cancellation_event': self.cancellation_event,
-            'downloaded_files': None,  # Managed per worker or global if passed
-            'downloaded_file_hashes': None,
-            'downloaded_files_lock': None,
-            'downloaded_file_hashes_lock': None,
-            # Add other necessary fields from config...
-            'manga_mode_active': config.get('manga_mode_active'),
-            'manga_filename_style': config.get('manga_filename_style'),
-            'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"),  # Pass custom format
-            'manga_custom_date_format': config.get('manga_custom_date_format', "YYYY-MM-DD"),
-            'use_multithreading': config.get('use_multithreading', True),
-            # Ensure defaults for others
-        }
+    def _handle_future_result(self, future: Future):
+        """Callback executed when a worker task completes."""
+        if self.cancellation_event.is_set():
+            return
+        with threading.Lock():  # Protect shared counters
+            self.processed_posts += 1
+        try:
+            if future.cancelled():
+                self._log("⚠️ A post processing task was cancelled.")
+                self.total_skips += 1
+            else:
+                result = future.result()
+                (dl_count, skip_count, kept_originals,
+                 retryable, permanent, history) = result
+                self.total_downloads += dl_count
+                self.total_skips += skip_count
+                self.all_kept_original_filenames.extend(kept_originals)
+                if retryable:
+                    self.progress_queue.put({'type': 'retryable_failure', 'payload': (retryable,)})
+                if permanent:
+                    self.progress_queue.put({'type': 'permanent_failure', 'payload': (permanent,)})
+                if history:
+                    self.progress_queue.put({'type': 'post_processed_history', 'payload': (history,)})
+                    post_id = history.get('post_id')
+                    if post_id and self.current_creator_profile_path:
+                        profile_data = self._setup_creator_profile({'creator_name_for_profile': self.current_creator_name_for_profile, 'session_file_path': self.session_file_path})
+                        if post_id not in profile_data.get('processed_post_ids', []):
+                            profile_data.setdefault('processed_post_ids', []).append(post_id)
+                            self._save_creator_profile(profile_data)
+        except Exception as e:
+            self._log(f"❌ Worker task resulted in an exception: {e}")
+            self.total_skips += 1  # Count errored posts as skipped
+        self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
     def _setup_creator_profile(self, config):
         """Prepares the path and loads data for the current creator's profile."""
-        # Extract name logic here or assume config has it
-        # ... (Same as your existing code)
-        self.current_creator_name_for_profile = "Unknown"  # Placeholder
-        # You should ideally extract name from URL or config here if available
+        self.current_creator_name_for_profile = config.get('creator_name_for_profile')
+        if not self.current_creator_name_for_profile:
+            self._log("⚠️ Cannot create creator profile: Name not provided in config.")
+            return {}
+        appdata_dir = os.path.dirname(config.get('session_file_path', '.'))
+        self.creator_profiles_dir = os.path.join(appdata_dir, "creator_profiles")
+        os.makedirs(self.creator_profiles_dir, exist_ok=True)
+        safe_filename = clean_folder_name(self.current_creator_name_for_profile) + ".json"
+        self.current_creator_profile_path = os.path.join(self.creator_profiles_dir, safe_filename)
+        if os.path.exists(self.current_creator_profile_path):
+            try:
+                with open(self.current_creator_profile_path, 'r', encoding='utf-8') as f:
+                    return json.load(f)
+            except (json.JSONDecodeError, OSError) as e:
+                self._log(f"❌ Error loading creator profile '{safe_filename}': {e}. Starting fresh.")
         return {}

     def _save_creator_profile(self, data):
@@ -272,33 +280,6 @@ class DownloadManager:
         self.cancellation_event.set()
         if self.thread_pool:
-            self.thread_pool.shutdown(wait=False, cancel_futures=True)
-
-    def _handle_future_result(self, future):
-        """Callback for when a worker task finishes."""
-        if self.active_futures:
-            try:
-                self.active_futures.remove(future)
-            except ValueError:
-                pass
-        try:
-            result = future.result()
-            # result tuple: (download_count, skip_count, kept_original_filenames, ...)
-            if result:
-                self.total_downloads += result[0]
-                self.total_skips += result[1]
-                if len(result) > 3 and result[3]:
-                    # filename was kept original
-                    pass
-        except CancelledError:
-            pass
-        except Exception as e:
-            self._log(f"❌ Worker Error: {e}")
-        self.processed_posts += 1
-        self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
-        if not self.active_futures and not self.is_running:
-            self._log("✅ All tasks completed.")
-            self.progress_queue.put({'type': 'worker_finished', 'payload': (self.total_downloads, self.total_skips, [], [])})
+            self._log("   Signaling all worker threads to stop and shutting down pool...")
+            self.thread_pool.shutdown(wait=False)

View File

@@ -1,7 +1,6 @@
 import sys
 import os
 import time
-import glob
 import queue
 import random
 import traceback
@@ -188,11 +187,6 @@ class DownloaderApp (QWidget ):
         self.user_data_path = user_data_path
-        self.jobs_dir = os.path.join(self.user_data_path, "jobs")
-        os.makedirs(self.jobs_dir, exist_ok=True)
-        self.is_running_job_queue = False
-        self.current_job_file = None
         self.config_file = os.path.join(user_data_path, "Known.txt")
         self.session_file_path = os.path.join(user_data_path, "session.json")
         self.persistent_history_file = os.path.join(user_data_path, "download_history.json")
@@ -363,178 +357,6 @@ class DownloaderApp (QWidget ):
         self._check_for_interrupted_session()
         self._cleanup_after_update()
-    def add_current_settings_to_queue(self):
-        """Saves the current UI settings as a JSON job file with creator-specific paths."""
-        # --- Helper: Append Name to Path safely ---
-        def get_creator_specific_path(base_dir, folder_name):
-            if not folder_name:
-                return base_dir
-            safe_name = clean_folder_name(folder_name)
-            # Avoid double pathing (e.g. if base is .../Artist and we append /Artist again)
-            if base_dir.replace('\\', '/').rstrip('/').endswith(safe_name):
-                return base_dir
-            return os.path.join(base_dir, safe_name)
-        # ------------------------------------------
-        # --- SCENARIO 1: Items from Creator Selection (Popup) ---
-        if self.favorite_download_queue:
-            count = 0
-            base_settings = self._get_current_ui_settings_as_dict()
-            items_to_process = list(self.favorite_download_queue)
-            for item in items_to_process:
-                real_url = item.get('url')
-                name = item.get('name', 'Unknown')
-                if not real_url: continue
-                job_settings = base_settings.copy()
-                job_settings['api_url'] = real_url
-                # Use the name provided by the selection popup
-                job_settings['output_dir'] = get_creator_specific_path(job_settings['output_dir'], name)
-                if self._save_single_job_file(job_settings, name_hint=name):
-                    count += 1
-            if count > 0:
-                self.log_signal.emit(f"✅ Added {count} jobs to queue from selection.")
-                self.link_input.clear()
-                self.favorite_download_queue.clear()
-                QMessageBox.information(self, "Queue", f"{count} jobs successfully added to queue!")
-            else:
-                QMessageBox.warning(self, "Queue Error", "Failed to add selected items to queue.")
-            return
-        # --- SCENARIO 2: Manual URL Entry ---
-        url = self.link_input.text().strip()
-        if not url:
-            QMessageBox.warning(self, "Input Error", "Cannot add to queue: URL is empty.")
-            return
-        settings = self._get_current_ui_settings_as_dict()
-        settings['api_url'] = url
-        # Attempt to resolve name from URL + Cache (creators.json)
-        service, user_id, post_id = extract_post_info(url)
-        name_hint = "Job"
-        if service and user_id:
-            # Try to find name in your local creators cache
-            cache_key = (service.lower(), str(user_id))
-            cached_name = self.creator_name_cache.get(cache_key)
-            if cached_name:
-                # CASE A: Creator Found -> Use Creator Name
-                name_hint = cached_name
-                settings['output_dir'] = get_creator_specific_path(settings['output_dir'], cached_name)
-            else:
-                # CASE B: Creator NOT Found -> Use Post ID or User ID
-                # If it's a single post link, 'post_id' will have a value.
-                # If it's a profile link, 'post_id' is None, so we use 'user_id'.
-                if post_id:
-                    folder_name = str(post_id)
-                else:
-                    folder_name = str(user_id)
-                name_hint = folder_name
-                settings['output_dir'] = get_creator_specific_path(settings['output_dir'], folder_name)
-        if self._save_single_job_file(settings, name_hint=name_hint):
-            self.log_signal.emit(f"✅ Job added to queue: {url}")
-            self.link_input.clear()
-            QMessageBox.information(self, "Queue", "Job successfully added to queue!")
-
-    def _save_single_job_file(self, settings_dict, name_hint="job"):
-        """Helper to write a single JSON job file to the jobs directory."""
-        import uuid
-        timestamp = int(time.time())
-        unique_id = uuid.uuid4().hex[:6]
-        # Clean the name hint to be safe for filenames
-        safe_name = "".join(c for c in name_hint if c.isalnum() or c in (' ', '_', '-')).strip()
-        if not safe_name:
-            safe_name = "job"
-        filename = f"job_{timestamp}_{safe_name}_{unique_id}.json"
-        filepath = os.path.join(self.jobs_dir, filename)
-        try:
-            with open(filepath, 'w', encoding='utf-8') as f:
-                json.dump(settings_dict, f, indent=2)
-            return True
-        except Exception as e:
-            self.log_signal.emit(f"❌ Failed to save job file '{filename}': {e}")
-            return False
-
-    def execute_job_queue(self):
-        """Starts the queue processing loop."""
-        job_files = sorted(glob.glob(os.path.join(self.jobs_dir, "job_*.json")))
-        if not job_files:
-            QMessageBox.information(self, "Queue Empty", "No job files found in appdata/jobs.")
-            return
-        self.log_signal.emit("=" * 40)
-        self.log_signal.emit(f"🚀 Starting execution of {len(job_files)} queued jobs.")
-        self.is_running_job_queue = True
-        self.download_btn.setEnabled(False)  # Disable button while running
-        self.add_queue_btn.setEnabled(False)
-        self._process_next_queued_job()
-
-    def _process_next_queued_job(self):
-        """Loads the next job file and starts the download."""
-        if self.cancellation_event.is_set():
-            self.is_running_job_queue = False
-            self.log_signal.emit("🛑 Queue execution cancelled.")
-            self._update_button_states_and_connections()
-            return
-        job_files = sorted(glob.glob(os.path.join(self.jobs_dir, "job_*.json")))
-        if not job_files:
-            self.is_running_job_queue = False
-            self.current_job_file = None
-            self.log_signal.emit("🏁 All queued jobs finished!")
-            self.link_input.clear()
-            QMessageBox.information(self, "Queue Finished", "All queued jobs have been processed.")
-            self._update_button_states_and_connections()
-            return
-        next_job_path = job_files[0]
-        self.current_job_file = next_job_path
-        self.log_signal.emit(f"📂 Loading job: {os.path.basename(next_job_path)}")
-        try:
-            with open(next_job_path, 'r', encoding='utf-8') as f:
-                settings = json.load(f)
-            # --- Ensure Directory Exists ---
-            # The settings now contain the full path (e.g. E:/Kemono/ArtistName)
-            target_dir = settings.get('output_dir', '')
-            if target_dir:
-                try:
-                    os.makedirs(target_dir, exist_ok=True)
-                except Exception as e:
-                    self.log_signal.emit(f"⚠️ Warning: Could not pre-create directory '{target_dir}': {e}")
-            # -------------------------------
-            # Load settings into UI
-            self._load_ui_from_settings_dict(settings)
-            QCoreApplication.processEvents()
-            # Start download
-            self.start_download()
-        except Exception as e:
-            self.log_signal.emit(f"❌ Error loading/starting job '{next_job_path}': {e}")
-            failed_path = next_job_path + ".failed"
-            os.rename(next_job_path, failed_path)
-            self._process_next_queued_job()
     def _run_discord_file_download_thread(self, session, server_id, channel_id, token, output_dir, message_limit=None):
         """
         Runs in a background thread to fetch and download all files from a Discord channel.
@@ -947,23 +769,6 @@
         is_download_active = self._is_download_active()
         fetch_first_enabled = self.settings.value(FETCH_FIRST_KEY, False, type=bool)
-        url_text = self.link_input.text().strip()
-        # --- NEW: Check for Queue Command ---
-        is_queue_command = (url_text.lower() == "start queue")
-        # --- NEW: Handle 'Add to Queue' Button State ---
-        if hasattr(self, 'add_queue_btn'):
-            # Only enable if not downloading, URL is valid, not in queue mode, and not in specialized fetch states
-            should_enable_queue = (
-                not is_download_active and
-                url_text != "" and
-                not is_queue_command and
-                not self.is_ready_to_download_fetched and
-                not self.is_ready_to_download_batch_update
-            )
-            self.add_queue_btn.setEnabled(should_enable_queue)
         print(f"--- DEBUG: Updating buttons (is_download_active={is_download_active}) ---")

         if self.is_ready_to_download_fetched:
@@ -1046,13 +851,8 @@
             num_posts = len(self.fetched_posts_for_download)
             self.download_btn.setText(f"⬇️ Start Download ({num_posts} Posts)")
             self.download_btn.setEnabled(True)  # Keep it enabled for the user to click
-        else:
-            # Check if running queue to show specific text
-            if hasattr(self, 'is_running_job_queue') and self.is_running_job_queue:
-                self.download_btn.setText("🔄 Processing Queue...")
         else:
             self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
             self.download_btn.setEnabled(False)
         self.pause_btn.setText(self._tr("resume_download_button_text", "▶️ Resume Download") if self.is_paused else self._tr("pause_download_button_text", "⏸️ Pause Download"))
@@ -1065,32 +865,22 @@
             self.cancel_btn.clicked.connect(self.cancel_download_button_action)
         else:
-            # --- IDLE STATE ---
-            if is_queue_command:
-                # --- NEW: Queue Execution Mode ---
-                self.download_btn.setText("🚀 Execute Queue")
-                self.download_btn.setEnabled(True)
-                # Ensure the method exists before connecting
-                if hasattr(self, 'execute_job_queue'):
-                    self.download_btn.clicked.connect(self.execute_job_queue)
-            else:
+            url_text = self.link_input.text().strip()
             _, _, post_id = extract_post_info(url_text)
             is_single_post = bool(post_id)
-            if fetch_first_enabled and not is_single_post and url_text:
+            if fetch_first_enabled and not is_single_post:
                 self.download_btn.setText("📄 Fetch Pages")
             else:
                 self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
             self.download_btn.setEnabled(True)
             self.download_btn.clicked.connect(self.start_download)
             self.pause_btn.setText(self._tr("pause_download_button_text", "⏸️ Pause Download"))
             self.pause_btn.setEnabled(False)
             self.cancel_btn.setText(self._tr("cancel_button_text", "❌ Cancel & Reset UI"))
             self.cancel_btn.setEnabled(False)

     def _run_fetch_only_thread(self, fetch_args):
         """
         Runs in a background thread to ONLY fetch all posts without downloading.
@@ -5953,14 +5743,6 @@
         if cancelled_by_user:
             self.log_signal.emit("✅ Cancellation complete. Resetting UI.")
-            # --- NEW: Reset Queue State on Cancel ---
-            if getattr(self, 'is_running_job_queue', False):
-                self.log_signal.emit("🛑 Queue execution stopped by user.")
-                self.is_running_job_queue = False
-                self.current_job_file = None
-            # ----------------------------------------
             self._clear_session_file()
             self.interrupted_session_data = None
             self.is_restore_pending = False
@@ -5975,7 +5757,7 @@
             self.log_signal.emit("🏁 Download of current item complete.")
-            # --- EXISTING: FAVORITE QUEUE PROCESSING BLOCK ---
+            # --- QUEUE PROCESSING BLOCK ---
             if self.is_processing_favorites_queue and self.favorite_download_queue:
                 self.log_signal.emit("✅ Item finished. Processing next in queue...")
                 if self.download_thread and isinstance(self.download_thread, QThread):
@@ -5991,39 +5773,6 @@
                 return
             # ---------------------------------------------------------
-            # --- NEW: JOB QUEUE CONTINUATION LOGIC ---
-            # Checks if we are in 'Execute Queue' mode and have a current job file active
-            if getattr(self, 'is_running_job_queue', False) and getattr(self, 'current_job_file', None):
-                self.log_signal.emit(f"✅ Job finished. Deleting job file: {os.path.basename(self.current_job_file)}")
-                # 1. Clean up resources for this specific run
-                self._finalize_download_history()
-                if self.thread_pool:
-                    self.thread_pool.shutdown(wait=False)
-                    self.thread_pool = None
-                self._cleanup_temp_files()
-                self.single_pdf_setting = False  # Reset per job
-                # 2. Delete the finished job file so it isn't run again
-                try:
-                    if os.path.exists(self.current_job_file):
-                        os.remove(self.current_job_file)
-                except Exception as e:
-                    self.log_signal.emit(f"⚠️ Failed to delete finished job file: {e}")
-                # 3. Reset state for next job
-                self.current_job_file = None
-                self.is_finishing = False
-                # 4. Release lock
-                self.finish_lock.release()
-                lock_held = False
-                # 5. Trigger next job in queue (using QTimer to allow stack to unwind)
-                QTimer.singleShot(100, self._process_next_queued_job)
-                return
-            # -----------------------------------------
             if self.is_processing_favorites_queue:
                 self.is_processing_favorites_queue = False
                 self.log_signal.emit("✅ All items from the download queue have been processed.")
@@ -6139,20 +5888,11 @@
                 # Reset the finishing lock and exit to let the retry session take over
                 self.is_finishing = False
-                # Release lock here as we are returning
-                self.finish_lock.release()
-                lock_held = False
                 return

         self.is_fetcher_thread_running = False
-        # --- POST DOWNLOAD ACTION (Only if queue is finished or not running queue) ---
         if not cancelled_by_user and not self.is_processing_favorites_queue:
-            # If we were running a job queue, we only do this when the queue is EMPTY (handled by _process_next_queued_job)
-            # But since we return early for job queue continuation above, getting here means
-            # we are either in a standard download OR the job queue has finished/was cancelled.
-            if not getattr(self, 'is_running_job_queue', False):
             self._execute_post_download_action()
             self.set_ui_enabled(True)

View File

@@ -347,6 +347,7 @@ def setup_ui(main_app):
     left_layout.addLayout(checkboxes_group_layout)

     # --- Action Buttons & Remaining UI ---
+    # ... (The rest of the setup_ui function remains unchanged)
     main_app.standard_action_buttons_widget = QWidget()
     btn_layout = QHBoxLayout(main_app.standard_action_buttons_widget)
     btn_layout.setContentsMargins(0, 10, 0, 0)
@@ -356,11 +357,6 @@
     font.setBold(True)
     main_app.download_btn.setFont(font)
    main_app.download_btn.clicked.connect(main_app.start_download)
-    main_app.add_queue_btn = QPushButton(" Add to Queue")
-    main_app.add_queue_btn.setToolTip("Save current settings as a job for later execution.")
-    main_app.add_queue_btn.clicked.connect(main_app.add_current_settings_to_queue)
     main_app.pause_btn = QPushButton("⏸️ Pause Download")
     main_app.pause_btn.setEnabled(False)
     main_app.pause_btn.clicked.connect(main_app._handle_pause_resume_action)
@@ -371,7 +367,6 @@
     main_app.error_btn.setToolTip("View files skipped due to errors and optionally retry them.")
     main_app.error_btn.setEnabled(True)
     btn_layout.addWidget(main_app.download_btn)
-    btn_layout.addWidget(main_app.add_queue_btn)
     btn_layout.addWidget(main_app.pause_btn)
     btn_layout.addWidget(main_app.cancel_btn)
     btn_layout.addWidget(main_app.error_btn)