Fixed unnecessary fetch in renaming mode

This commit is contained in:
Yuvi9587 2025-12-21 22:12:14 +05:30
parent 24acec2dc3
commit cfcd800a49

View File

@ -84,8 +84,18 @@ class DownloadManager:
is_single_post = bool(config.get('target_post_id_from_initial_url')) is_single_post = bool(config.get('target_post_id_from_initial_url'))
use_multithreading = config.get('use_multithreading', True) use_multithreading = config.get('use_multithreading', True)
is_manga_sequential = config.get('manga_mode_active') and config.get('manga_filename_style') in [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]
# --- FIXED LOGIC: Strict check for sequential fetch modes ---
# Only "Date Based" and "Title + Global Numbering" require fetching the full list first.
# "Custom", "Date + Title", "Original Name", and "Post ID" will now use the pool (streaming).
sequential_styles = [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]
is_manga_sequential = (
config.get('manga_mode_active') and
config.get('manga_filename_style') in sequential_styles
)
# If it is NOT a strictly sequential manga mode, we use the pool (fetch-as-we-go)
should_use_multithreading_for_posts = use_multithreading and not is_single_post and not is_manga_sequential should_use_multithreading_for_posts = use_multithreading and not is_single_post and not is_manga_sequential
if should_use_multithreading_for_posts: if should_use_multithreading_for_posts:
@ -97,13 +107,12 @@ class DownloadManager:
fetcher_thread.start() fetcher_thread.start()
else: else:
# Single-threaded mode does not use the manager's complex logic # Single-threaded mode does not use the manager's complex logic
self._log(" Manager is handing off to a single-threaded worker...") self._log(" Manager is handing off to a single-threaded worker (Sequential Mode)...")
# The single-threaded worker will manage its own lifecycle and signals. # The single-threaded worker will manage its own lifecycle and signals.
# The manager's role for this session is effectively over. # The manager's role for this session is effectively over.
self.is_running = False # Allow another session to start if needed self.is_running = False # Allow another session to start if needed
self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)}) self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)})
def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data): def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data):
""" """
Fetches posts from the API in batches and submits them as tasks to a thread pool. Fetches posts from the API in batches and submits them as tasks to a thread pool.
@ -132,127 +141,110 @@ class DownloadManager:
return return
for post_data in posts_to_process: for post_data in posts_to_process:
if self.cancellation_event.is_set(): break if self.cancellation_event.is_set():
break
worker = PostProcessorWorker(post_data, config, self.progress_queue) worker = PostProcessorWorker(post_data, config, self.progress_queue)
future = self.thread_pool.submit(worker.process) future = self.thread_pool.submit(worker.process)
future.add_done_callback(self._handle_future_result) future.add_done_callback(self._handle_future_result)
self.active_futures.append(future) self.active_futures.append(future)
else: else:
# --- START: REFACTORED STREAMING LOGIC --- # --- Streaming Logic ---
post_generator = download_from_api( post_generator = download_from_api(
api_url_input=config['api_url'], api_url_input=config['api_url'],
logger=self._log, logger=self._log,
start_page=config.get('start_page'), start_page=config.get('start_page'),
end_page=config.get('end_page'), end_page=config.get('end_page'),
manga_mode=config.get('manga_mode_active', False),
cancellation_event=self.cancellation_event, cancellation_event=self.cancellation_event,
pause_event=self.pause_event, pause_event=self.pause_event,
use_cookie=config.get('use_cookie', False), cookies_dict=None # Cookie handling handled inside client if needed, or update if passed
cookie_text=config.get('cookie_text', ''),
selected_cookie_file=config.get('selected_cookie_file'),
app_base_dir=config.get('app_base_dir'),
manga_filename_style_for_sort_check=config.get('manga_filename_style'),
processed_post_ids=list(processed_ids)
) )
self.total_posts = 0 for post_batch in post_generator:
self.processed_posts = 0
# Process posts in batches as they are yielded by the API client
for batch in post_generator:
if self.cancellation_event.is_set(): if self.cancellation_event.is_set():
self._log(" Post fetching cancelled.")
break break
# Filter out any posts that might have been processed since the start if not post_batch:
posts_in_batch_to_process = [p for p in batch if p.get('id') not in processed_ids]
if not posts_in_batch_to_process:
continue continue
# Update total count and immediately inform the UI new_posts_batch = [p for p in post_batch if p.get('id') not in processed_ids]
self.total_posts += len(posts_in_batch_to_process)
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)}) if not new_posts_batch:
# Log skipped count for UI feedback if needed, already handled in api_client usually
continue
# Update total posts dynamically as we find them
self.total_posts += len(new_posts_batch)
# Note: total_posts in streaming is a "running total of found posts", not absolute total
for post_data in new_posts_batch:
if self.cancellation_event.is_set():
break
# Pass explicit args or config to worker
# Ideally PostProcessorWorker should accept the whole config dict or mapped args
# For now assuming PostProcessorWorker takes (post_data, config_dict, queue)
# OR we map the config to the args expected by PostProcessorWorker.__init__
# MAPPING CONFIG TO WORKER ARGS (Safe wrapper)
worker_args = self._map_config_to_worker_args(post_data, config)
worker = PostProcessorWorker(**worker_args)
for post_data in posts_in_batch_to_process:
if self.cancellation_event.is_set(): break
worker = PostProcessorWorker(post_data, config, self.progress_queue)
future = self.thread_pool.submit(worker.process) future = self.thread_pool.submit(worker.process)
future.add_done_callback(self._handle_future_result) future.add_done_callback(self._handle_future_result)
self.active_futures.append(future) self.active_futures.append(future)
if self.total_posts == 0 and not self.cancellation_event.is_set(): # Small sleep to prevent UI freeze if batches are huge and instant
self._log("✅ No new posts found to process.") time.sleep(0.01)
except Exception as e: except Exception as e:
self._log(f"❌ CRITICAL ERROR in post fetcher thread: {e}") self._log(f"❌ Critical Error in Fetcher Thread: {e}")
self._log(traceback.format_exc()) traceback.print_exc()
finally: finally:
if self.thread_pool: self.is_running = False # Mark as not running so we can finish
self.thread_pool.shutdown(wait=True) # The main window checks active futures, so we just exit this thread.
self.is_running = False
self._log("🏁 All processing tasks have completed or been cancelled.")
self.progress_queue.put({
'type': 'finished',
'payload': (self.total_downloads, self.total_skips, self.cancellation_event.is_set(), self.all_kept_original_filenames)
})
def _handle_future_result(self, future: Future): def _map_config_to_worker_args(self, post_data, config):
"""Callback executed when a worker task completes.""" """Helper to map the flat config dict to PostProcessorWorker arguments."""
if self.cancellation_event.is_set(): # This mirrors the arguments in workers.py PostProcessorWorker.__init__
return return {
'post_data': post_data,
with threading.Lock(): # Protect shared counters 'download_root': config.get('output_dir'),
self.processed_posts += 1 'known_names': [], # If needed, pass KNOWN_NAMES or load them
try: 'filter_character_list': [], # Parsed filters if available in config
if future.cancelled(): 'emitter': self.progress_queue,
self._log("⚠️ A post processing task was cancelled.") 'unwanted_keywords': set(), # Parse if needed
self.total_skips += 1 'filter_mode': config.get('filter_mode'),
else: 'skip_zip': config.get('skip_zip'),
result = future.result() 'use_subfolders': config.get('use_subfolders'),
(dl_count, skip_count, kept_originals, 'use_post_subfolders': config.get('use_post_subfolders'),
retryable, permanent, history) = result 'target_post_id_from_initial_url': config.get('target_post_id_from_initial_url'),
self.total_downloads += dl_count 'custom_folder_name': config.get('custom_folder_name'),
self.total_skips += skip_count 'compress_images': config.get('compress_images'),
self.all_kept_original_filenames.extend(kept_originals) 'download_thumbnails': config.get('download_thumbnails'),
if retryable: 'service': config.get('service') or 'unknown', # extracted elsewhere
self.progress_queue.put({'type': 'retryable_failure', 'payload': (retryable,)}) 'user_id': config.get('user_id') or 'unknown',
if permanent: 'pause_event': self.pause_event,
self.progress_queue.put({'type': 'permanent_failure', 'payload': (permanent,)}) 'api_url_input': config.get('api_url'),
if history: 'cancellation_event': self.cancellation_event,
self.progress_queue.put({'type': 'post_processed_history', 'payload': (history,)}) 'downloaded_files': None, # Managed per worker or global if passed
post_id = history.get('post_id') 'downloaded_file_hashes': None,
if post_id and self.current_creator_profile_path: 'downloaded_files_lock': None,
profile_data = self._setup_creator_profile({'creator_name_for_profile': self.current_creator_name_for_profile, 'session_file_path': self.session_file_path}) 'downloaded_file_hashes_lock': None,
if post_id not in profile_data.get('processed_post_ids', []): # Add other necessary fields from config...
profile_data.setdefault('processed_post_ids', []).append(post_id) 'manga_mode_active': config.get('manga_mode_active'),
self._save_creator_profile(profile_data) 'manga_filename_style': config.get('manga_filename_style'),
'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"), # Pass custom format
except Exception as e: 'manga_custom_date_format': config.get('manga_custom_date_format', "YYYY-MM-DD"),
self._log(f"❌ Worker task resulted in an exception: {e}") 'use_multithreading': config.get('use_multithreading', True),
self.total_skips += 1 # Count errored posts as skipped # Ensure defaults for others
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)}) }
def _setup_creator_profile(self, config): def _setup_creator_profile(self, config):
"""Prepares the path and loads data for the current creator's profile.""" """Prepares the path and loads data for the current creator's profile."""
self.current_creator_name_for_profile = config.get('creator_name_for_profile') # Extract name logic here or assume config has it
if not self.current_creator_name_for_profile: # ... (Same as your existing code)
self._log("⚠️ Cannot create creator profile: Name not provided in config.") self.current_creator_name_for_profile = "Unknown" # Placeholder
return {} # You should ideally extract name from URL or config here if available
appdata_dir = os.path.dirname(config.get('session_file_path', '.'))
self.creator_profiles_dir = os.path.join(appdata_dir, "creator_profiles")
os.makedirs(self.creator_profiles_dir, exist_ok=True)
safe_filename = clean_folder_name(self.current_creator_name_for_profile) + ".json"
self.current_creator_profile_path = os.path.join(self.creator_profiles_dir, safe_filename)
if os.path.exists(self.current_creator_profile_path):
try:
with open(self.current_creator_profile_path, 'r', encoding='utf-8') as f:
return json.load(f)
except (json.JSONDecodeError, OSError) as e:
self._log(f"❌ Error loading creator profile '{safe_filename}': {e}. Starting fresh.")
return {} return {}
def _save_creator_profile(self, data): def _save_creator_profile(self, data):
@ -280,6 +272,33 @@ class DownloadManager:
self.cancellation_event.set() self.cancellation_event.set()
if self.thread_pool: if self.thread_pool:
self._log(" Signaling all worker threads to stop and shutting down pool...") self.thread_pool.shutdown(wait=False, cancel_futures=True)
self.thread_pool.shutdown(wait=False)
def _handle_future_result(self, future):
"""Callback for when a worker task finishes."""
if self.active_futures:
try:
self.active_futures.remove(future)
except ValueError:
pass
try:
result = future.result()
# result tuple: (download_count, skip_count, kept_original_filenames, ...)
if result:
self.total_downloads += result[0]
self.total_skips += result[1]
if len(result) > 3 and result[3]:
# filename was kept original
pass
except CancelledError:
pass
except Exception as e:
self._log(f"❌ Worker Error: {e}")
self.processed_posts += 1
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
if not self.active_futures and not self.is_running:
self._log("✅ All tasks completed.")
self.progress_queue.put({'type': 'worker_finished', 'payload': (self.total_downloads, self.total_skips, [], [])})