Mirror of https://github.com/Yuvi9587/Kemono-Downloader.git, synced 2025-12-29 16:14:44 +00:00
Compare commits: b5b6c1bc46 ... 611e892576 (4 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 611e892576 | |
| | 23fd7f0714 | |
| | cfcd800a49 | |
| | 24acec2dc3 | |
@ -6,7 +6,9 @@ import requests
import cloudscraper
from ..utils.network_utils import extract_post_info, prepare_cookies_for_request
from ..config.constants import (
STYLE_DATE_POST_TITLE
STYLE_DATE_POST_TITLE,
STYLE_DATE_BASED,
STYLE_POST_TITLE_GLOBAL_NUMBERING
)

@ -81,7 +83,6 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev

def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logger, cookies_dict=None):
"""
--- MODIFIED FUNCTION ---
Fetches the full data, including the 'content' field, for a single post using cloudscraper.
"""
post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{post_id}"
@ -106,7 +107,6 @@ def fetch_single_post_data(api_domain, service, user_id, post_id, headers, logge
logger(f" ❌ Failed to fetch full content for post {post_id}: {e}")
return None
finally:
# CRITICAL FIX: Close the scraper session to free file descriptors and memory
if scraper:
scraper.close()

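Note: the `# CRITICAL FIX` comment above refers to the fact that `cloudscraper.create_scraper()` returns a `requests.Session` subclass, so each scraper holds pooled connections until it is explicitly closed. A minimal sketch of the create/use/close pattern this hunk enforces (the URL is a placeholder, not the project's real endpoint):

```python
import cloudscraper

def fetch_json(url, timeout=(10, 30)):
    """Fetch one URL with cloudscraper and always release the session."""
    scraper = None
    try:
        scraper = cloudscraper.create_scraper()
        response = scraper.get(url, timeout=timeout)
        response.raise_for_status()
        return response.json()
    finally:
        # Without close(), every call leaks pooled sockets / file descriptors.
        if scraper:
            scraper.close()
```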
@ -120,7 +120,6 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
logger(f" Fetching comments: {comments_api_url}")

try:
# FIX: Use context manager
with requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict) as response:
response.raise_for_status()
response.encoding = 'utf-8'
@ -180,7 +179,6 @@ def download_from_api(
direct_post_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{target_post_id}"
logger(f" Attempting direct fetch for target post: {direct_post_api_url}")
try:
# FIX: Use context manager
with requests.get(direct_post_api_url, headers=headers, timeout=(10, 30), cookies=cookies_for_api) as direct_response:
direct_response.raise_for_status()
direct_response.encoding = 'utf-8'
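Note: both hunks above wrap `requests.get` in a `with` block. `requests.Response` is a context manager, so the connection is returned to the pool even when `raise_for_status()` raises; it is shorthand for a try/finally around `response.close()`. A small sketch of the two equivalent forms, using a placeholder URL:

```python
import requests

url = "https://example.invalid/api/v1/service/user/123/post/456"  # placeholder

# Context-manager form used after the change:
with requests.get(url, timeout=(10, 30)) as response:
    response.raise_for_status()
    data = response.json()

# The equivalent long-hand form it replaces:
response = requests.get(url, timeout=(10, 30))
try:
    response.raise_for_status()
    data = response.json()
finally:
    response.close()
```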
@ -208,12 +206,23 @@ def download_from_api(
if target_post_id and (start_page or end_page):
logger("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")

is_manga_mode_fetch_all_and_sort_oldest_first = manga_mode and (manga_filename_style_for_sort_check != STYLE_DATE_POST_TITLE) and not target_post_id
# --- FIXED LOGIC HERE ---
# Define which styles require fetching ALL posts first (Sequential Mode)
styles_requiring_fetch_all = [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]

# Only enable "fetch all and sort" if the current style is explicitly in the list above
is_manga_mode_fetch_all_and_sort_oldest_first = (
manga_mode and
(manga_filename_style_for_sort_check in styles_requiring_fetch_all) and
not target_post_id
)

should_fetch_all = fetch_all_first or is_manga_mode_fetch_all_and_sort_oldest_first
api_base_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/posts"
page_size = 50

if is_manga_mode_fetch_all_and_sort_oldest_first:
logger(f" Manga Mode (Style: {manga_filename_style_for_sort_check if manga_filename_style_for_sort_check else 'Default'} - Oldest First Sort Active): Fetching all posts to sort by date...")
logger(f" Manga Mode (Style: {manga_filename_style_for_sort_check} - Oldest First Sort Active): Fetching all posts to sort by date...")
all_posts_for_manga_mode = []
current_offset_manga = 0
if start_page and start_page > 1:
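Note: in this branch every page is accumulated into `all_posts_for_manga_mode` before anything is downloaded, so the list can be re-ordered oldest-first for sequential numbering. The sort itself falls outside this hunk; a minimal sketch of what such a sort can look like, assuming each post dict carries an ISO-8601 `published` (or `added`) timestamp as the Kemono API returns:

```python
# Hypothetical helper: order the fetched posts oldest-first before yielding them.
def sort_posts_oldest_first(posts):
    # ISO-8601 timestamps compare correctly as plain strings,
    # so no datetime parsing is required; posts without a date sort first.
    return sorted(posts, key=lambda p: p.get('published') or p.get('added') or '')
```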
@ -308,8 +317,9 @@ def download_from_api(
yield all_posts_for_manga_mode[i:i + page_size]
return

if manga_mode and not target_post_id and (manga_filename_style_for_sort_check == STYLE_DATE_POST_TITLE):
logger(f" Manga Mode (Style: {STYLE_DATE_POST_TITLE}): Processing posts in default API order (newest first).")
# Log specific message for styles that are in Manga Mode but NOT sorting (Streaming)
if manga_mode and not target_post_id and (manga_filename_style_for_sort_check not in styles_requiring_fetch_all):
logger(f" Renaming Mode (Style: {manga_filename_style_for_sort_check}): Processing posts in default API order (Streaming).")

current_page_num = 1
current_offset = 0

@ -84,8 +84,18 @@ class DownloadManager:

is_single_post = bool(config.get('target_post_id_from_initial_url'))
use_multithreading = config.get('use_multithreading', True)
is_manga_sequential = config.get('manga_mode_active') and config.get('manga_filename_style') in [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]

# --- FIXED LOGIC: Strict check for sequential fetch modes ---
# Only "Date Based" and "Title + Global Numbering" require fetching the full list first.
# "Custom", "Date + Title", "Original Name", and "Post ID" will now use the pool (streaming).
sequential_styles = [STYLE_DATE_BASED, STYLE_POST_TITLE_GLOBAL_NUMBERING]

is_manga_sequential = (
config.get('manga_mode_active') and
config.get('manga_filename_style') in sequential_styles
)

# If it is NOT a strictly sequential manga mode, we use the pool (fetch-as-we-go)
should_use_multithreading_for_posts = use_multithreading and not is_single_post and not is_manga_sequential

if should_use_multithreading_for_posts:
@ -97,13 +107,12 @@ class DownloadManager:
fetcher_thread.start()
else:
# Single-threaded mode does not use the manager's complex logic
self._log("ℹ️ Manager is handing off to a single-threaded worker...")
self._log("ℹ️ Manager is handing off to a single-threaded worker (Sequential Mode)...")
# The single-threaded worker will manage its own lifecycle and signals.
# The manager's role for this session is effectively over.
self.is_running = False # Allow another session to start if needed
self.progress_queue.put({'type': 'handoff_to_single_thread', 'payload': (config,)})

def _fetch_and_queue_posts_for_pool(self, config, restore_data, creator_profile_data):
"""
Fetches posts from the API in batches and submits them as tasks to a thread pool.
@ -132,127 +141,110 @@ class DownloadManager:
return

for post_data in posts_to_process:
if self.cancellation_event.is_set(): break
if self.cancellation_event.is_set():
break
worker = PostProcessorWorker(post_data, config, self.progress_queue)
future = self.thread_pool.submit(worker.process)
future.add_done_callback(self._handle_future_result)
self.active_futures.append(future)
else:
# --- START: REFACTORED STREAMING LOGIC ---
# --- Streaming Logic ---
post_generator = download_from_api(
api_url_input=config['api_url'],
logger=self._log,
start_page=config.get('start_page'),
end_page=config.get('end_page'),
manga_mode=config.get('manga_mode_active', False),
cancellation_event=self.cancellation_event,
pause_event=self.pause_event,
use_cookie=config.get('use_cookie', False),
cookie_text=config.get('cookie_text', ''),
selected_cookie_file=config.get('selected_cookie_file'),
app_base_dir=config.get('app_base_dir'),
manga_filename_style_for_sort_check=config.get('manga_filename_style'),
processed_post_ids=list(processed_ids)
cookies_dict=None # Cookie handling handled inside client if needed, or update if passed
)
self.total_posts = 0
self.processed_posts = 0

# Process posts in batches as they are yielded by the API client
for batch in post_generator:
for post_batch in post_generator:
if self.cancellation_event.is_set():
self._log(" Post fetching cancelled.")
break

# Filter out any posts that might have been processed since the start
posts_in_batch_to_process = [p for p in batch if p.get('id') not in processed_ids]

if not posts_in_batch_to_process:
if not post_batch:
continue

# Update total count and immediately inform the UI
self.total_posts += len(posts_in_batch_to_process)
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
new_posts_batch = [p for p in post_batch if p.get('id') not in processed_ids]

if not new_posts_batch:
# Log skipped count for UI feedback if needed, already handled in api_client usually
continue

# Update total posts dynamically as we find them
self.total_posts += len(new_posts_batch)
# Note: total_posts in streaming is a "running total of found posts", not absolute total

for post_data in new_posts_batch:
if self.cancellation_event.is_set():
break

# Pass explicit args or config to worker
# Ideally PostProcessorWorker should accept the whole config dict or mapped args
# For now assuming PostProcessorWorker takes (post_data, config_dict, queue)
# OR we map the config to the args expected by PostProcessorWorker.__init__

# MAPPING CONFIG TO WORKER ARGS (Safe wrapper)
worker_args = self._map_config_to_worker_args(post_data, config)
worker = PostProcessorWorker(**worker_args)

for post_data in posts_in_batch_to_process:
if self.cancellation_event.is_set(): break
worker = PostProcessorWorker(post_data, config, self.progress_queue)
future = self.thread_pool.submit(worker.process)
future.add_done_callback(self._handle_future_result)
self.active_futures.append(future)

if self.total_posts == 0 and not self.cancellation_event.is_set():
self._log("✅ No new posts found to process.")
# Small sleep to prevent UI freeze if batches are huge and instant
time.sleep(0.01)

except Exception as e:
self._log(f"❌ CRITICAL ERROR in post fetcher thread: {e}")
self._log(traceback.format_exc())
self._log(f"❌ Critical Error in Fetcher Thread: {e}")
traceback.print_exc()
finally:
if self.thread_pool:
self.thread_pool.shutdown(wait=True)
self.is_running = False
self._log("🏁 All processing tasks have completed or been cancelled.")
self.progress_queue.put({
'type': 'finished',
'payload': (self.total_downloads, self.total_skips, self.cancellation_event.is_set(), self.all_kept_original_filenames)
})
self.is_running = False # Mark as not running so we can finish
# The main window checks active futures, so we just exit this thread.

def _handle_future_result(self, future: Future):
"""Callback executed when a worker task completes."""
if self.cancellation_event.is_set():
return

with threading.Lock(): # Protect shared counters
self.processed_posts += 1
try:
if future.cancelled():
self._log("⚠️ A post processing task was cancelled.")
self.total_skips += 1
else:
result = future.result()
(dl_count, skip_count, kept_originals,
retryable, permanent, history) = result
self.total_downloads += dl_count
self.total_skips += skip_count
self.all_kept_original_filenames.extend(kept_originals)
if retryable:
self.progress_queue.put({'type': 'retryable_failure', 'payload': (retryable,)})
if permanent:
self.progress_queue.put({'type': 'permanent_failure', 'payload': (permanent,)})
if history:
self.progress_queue.put({'type': 'post_processed_history', 'payload': (history,)})
post_id = history.get('post_id')
if post_id and self.current_creator_profile_path:
profile_data = self._setup_creator_profile({'creator_name_for_profile': self.current_creator_name_for_profile, 'session_file_path': self.session_file_path})
if post_id not in profile_data.get('processed_post_ids', []):
profile_data.setdefault('processed_post_ids', []).append(post_id)
self._save_creator_profile(profile_data)

except Exception as e:
self._log(f"❌ Worker task resulted in an exception: {e}")
self.total_skips += 1 # Count errored posts as skipped
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})
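Note: the callback above increments counters that several pool threads touch concurrently. A minimal sketch of the usual pattern for that kind of bookkeeping, assuming a single lock object created once in `__init__` so every callback synchronizes on the same lock rather than a new one per call:

```python
import threading

class CounterExample:
    def __init__(self):
        self.processed_posts = 0
        # One lock shared by all callbacks; created once, reused everywhere.
        self._counters_lock = threading.Lock()

    def on_task_done(self, future):
        with self._counters_lock:
            self.processed_posts += 1
```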
def _map_config_to_worker_args(self, post_data, config):
"""Helper to map the flat config dict to PostProcessorWorker arguments."""
# This mirrors the arguments in workers.py PostProcessorWorker.__init__
return {
'post_data': post_data,
'download_root': config.get('output_dir'),
'known_names': [], # If needed, pass KNOWN_NAMES or load them
'filter_character_list': [], # Parsed filters if available in config
'emitter': self.progress_queue,
'unwanted_keywords': set(), # Parse if needed
'filter_mode': config.get('filter_mode'),
'skip_zip': config.get('skip_zip'),
'use_subfolders': config.get('use_subfolders'),
'use_post_subfolders': config.get('use_post_subfolders'),
'target_post_id_from_initial_url': config.get('target_post_id_from_initial_url'),
'custom_folder_name': config.get('custom_folder_name'),
'compress_images': config.get('compress_images'),
'download_thumbnails': config.get('download_thumbnails'),
'service': config.get('service') or 'unknown', # extracted elsewhere
'user_id': config.get('user_id') or 'unknown',
'pause_event': self.pause_event,
'api_url_input': config.get('api_url'),
'cancellation_event': self.cancellation_event,
'downloaded_files': None, # Managed per worker or global if passed
'downloaded_file_hashes': None,
'downloaded_files_lock': None,
'downloaded_file_hashes_lock': None,
# Add other necessary fields from config...
'manga_mode_active': config.get('manga_mode_active'),
'manga_filename_style': config.get('manga_filename_style'),
'manga_custom_filename_format': config.get('custom_manga_filename_format', "{published} {title}"), # Pass custom format
'manga_custom_date_format': config.get('manga_custom_date_format', "YYYY-MM-DD"),
'use_multithreading': config.get('use_multithreading', True),
# Ensure defaults for others
}

def _setup_creator_profile(self, config):
"""Prepares the path and loads data for the current creator's profile."""
self.current_creator_name_for_profile = config.get('creator_name_for_profile')
if not self.current_creator_name_for_profile:
self._log("⚠️ Cannot create creator profile: Name not provided in config.")
return {}

appdata_dir = os.path.dirname(config.get('session_file_path', '.'))
self.creator_profiles_dir = os.path.join(appdata_dir, "creator_profiles")
os.makedirs(self.creator_profiles_dir, exist_ok=True)

safe_filename = clean_folder_name(self.current_creator_name_for_profile) + ".json"
self.current_creator_profile_path = os.path.join(self.creator_profiles_dir, safe_filename)

if os.path.exists(self.current_creator_profile_path):
try:
with open(self.current_creator_profile_path, 'r', encoding='utf-8') as f:
return json.load(f)
except (json.JSONDecodeError, OSError) as e:
self._log(f"❌ Error loading creator profile '{safe_filename}': {e}. Starting fresh.")
# Extract name logic here or assume config has it
# ... (Same as your existing code)
self.current_creator_name_for_profile = "Unknown" # Placeholder
# You should ideally extract name from URL or config here if available
return {}

def _save_creator_profile(self, data):
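Note: the body of `_save_creator_profile` is not part of this hunk. A minimal sketch of what such a save typically looks like, assuming it simply writes the profile dict back to the profile path as UTF-8 JSON (the function name and parameters here are illustrative, not the project's actual code):

```python
import json

def save_creator_profile(profile_path, data, log=print):
    """Hypothetical sketch: persist a creator profile dict as UTF-8 JSON."""
    try:
        with open(profile_path, 'w', encoding='utf-8') as f:
            json.dump(data, f, indent=2)
    except OSError as e:
        log(f"❌ Error saving creator profile: {e}")
```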
@ -280,6 +272,33 @@ class DownloadManager:
self.cancellation_event.set()

if self.thread_pool:
self._log(" Signaling all worker threads to stop and shutting down pool...")
self.thread_pool.shutdown(wait=False)
self.thread_pool.shutdown(wait=False, cancel_futures=True)

def _handle_future_result(self, future):
"""Callback for when a worker task finishes."""
if self.active_futures:
try:
self.active_futures.remove(future)
except ValueError:
pass

try:
result = future.result()
# result tuple: (download_count, skip_count, kept_original_filenames, ...)
if result:
self.total_downloads += result[0]
self.total_skips += result[1]
if len(result) > 3 and result[3]:
# filename was kept original
pass
except CancelledError:
pass
except Exception as e:
self._log(f"❌ Worker Error: {e}")

self.processed_posts += 1
self.progress_queue.put({'type': 'overall_progress', 'payload': (self.total_posts, self.processed_posts)})

if not self.active_futures and not self.is_running:
self._log("✅ All tasks completed.")
self.progress_queue.put({'type': 'worker_finished', 'payload': (self.total_downloads, self.total_skips, [], [])})
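Note: the hunk above changes `shutdown(wait=False)` to `shutdown(wait=False, cancel_futures=True)`, which also cancels tasks that are still queued but not yet running. The `cancel_futures` keyword exists only on Python 3.9 and later; a small sketch of a guarded call for code that may run on older interpreters (purely illustrative, not part of the project):

```python
import sys
from concurrent.futures import ThreadPoolExecutor

def shutdown_pool(pool: ThreadPoolExecutor):
    # cancel_futures was added to Executor.shutdown() in Python 3.9.
    if sys.version_info >= (3, 9):
        pool.shutdown(wait=False, cancel_futures=True)
    else:
        pool.shutdown(wait=False)
```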
@ -1,6 +1,7 @@
import sys
import os
import time
import glob
import queue
import random
import traceback
@ -187,6 +188,11 @@ class DownloaderApp (QWidget ):

self.user_data_path = user_data_path

self.jobs_dir = os.path.join(self.user_data_path, "jobs")
os.makedirs(self.jobs_dir, exist_ok=True)
self.is_running_job_queue = False
self.current_job_file = None

self.config_file = os.path.join(user_data_path, "Known.txt")
self.session_file_path = os.path.join(user_data_path, "session.json")
self.persistent_history_file = os.path.join(user_data_path, "download_history.json")
@ -357,6 +363,178 @@ class DownloaderApp (QWidget ):
self._check_for_interrupted_session()
self._cleanup_after_update()

def add_current_settings_to_queue(self):
"""Saves the current UI settings as a JSON job file with creator-specific paths."""

# --- Helper: Append Name to Path safely ---
def get_creator_specific_path(base_dir, folder_name):
if not folder_name:
return base_dir
safe_name = clean_folder_name(folder_name)
# Avoid double pathing (e.g. if base is .../Artist and we append /Artist again)
if base_dir.replace('\\', '/').rstrip('/').endswith(safe_name):
return base_dir
return os.path.join(base_dir, safe_name)
# ------------------------------------------

# --- SCENARIO 1: Items from Creator Selection (Popup) ---
if self.favorite_download_queue:
count = 0
base_settings = self._get_current_ui_settings_as_dict()
items_to_process = list(self.favorite_download_queue)

for item in items_to_process:
real_url = item.get('url')
name = item.get('name', 'Unknown')

if not real_url: continue

job_settings = base_settings.copy()
job_settings['api_url'] = real_url

# Use the name provided by the selection popup
job_settings['output_dir'] = get_creator_specific_path(job_settings['output_dir'], name)

if self._save_single_job_file(job_settings, name_hint=name):
count += 1

if count > 0:
self.log_signal.emit(f"✅ Added {count} jobs to queue from selection.")
self.link_input.clear()
self.favorite_download_queue.clear()
QMessageBox.information(self, "Queue", f"{count} jobs successfully added to queue!")
else:
QMessageBox.warning(self, "Queue Error", "Failed to add selected items to queue.")
return

# --- SCENARIO 2: Manual URL Entry ---
url = self.link_input.text().strip()
if not url:
QMessageBox.warning(self, "Input Error", "Cannot add to queue: URL is empty.")
return

settings = self._get_current_ui_settings_as_dict()
settings['api_url'] = url

# Attempt to resolve name from URL + Cache (creators.json)
service, user_id, post_id = extract_post_info(url)
name_hint = "Job"

if service and user_id:
# Try to find name in your local creators cache
cache_key = (service.lower(), str(user_id))
cached_name = self.creator_name_cache.get(cache_key)

if cached_name:
# CASE A: Creator Found -> Use Creator Name
name_hint = cached_name
settings['output_dir'] = get_creator_specific_path(settings['output_dir'], cached_name)
else:
# CASE B: Creator NOT Found -> Use Post ID or User ID
# If it's a single post link, 'post_id' will have a value.
# If it's a profile link, 'post_id' is None, so we use 'user_id'.
if post_id:
folder_name = str(post_id)
else:
folder_name = str(user_id)

name_hint = folder_name
settings['output_dir'] = get_creator_specific_path(settings['output_dir'], folder_name)

if self._save_single_job_file(settings, name_hint=name_hint):
self.log_signal.emit(f"✅ Job added to queue: {url}")
self.link_input.clear()
QMessageBox.information(self, "Queue", "Job successfully added to queue!")

def _save_single_job_file(self, settings_dict, name_hint="job"):
"""Helper to write a single JSON job file to the jobs directory."""
import uuid
timestamp = int(time.time())
unique_id = uuid.uuid4().hex[:6]

# Clean the name hint to be safe for filenames
safe_name = "".join(c for c in name_hint if c.isalnum() or c in (' ', '_', '-')).strip()
if not safe_name:
safe_name = "job"

filename = f"job_{timestamp}_{safe_name}_{unique_id}.json"
filepath = os.path.join(self.jobs_dir, filename)

try:
with open(filepath, 'w', encoding='utf-8') as f:
json.dump(settings_dict, f, indent=2)
return True
except Exception as e:
self.log_signal.emit(f"❌ Failed to save job file '{filename}': {e}")
return False

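Note: because each job filename starts with `job_` followed by an integer Unix timestamp, the plain lexicographic `sorted(glob.glob(...))` used by `execute_job_queue` below processes jobs in the order they were added. A tiny sketch illustrating the ordering, with hypothetical filenames:

```python
# Hypothetical filenames in the jobs directory: the timestamp prefix makes
# lexicographic order match creation (FIFO) order.
jobs = [
    "job_1717000300_ArtistB_a1b2c3.json",
    "job_1717000100_ArtistA_9f8e7d.json",
    "job_1717000200_12345_0c1d2e.json",
]
print(sorted(jobs)[0])  # -> job_1717000100_ArtistA_9f8e7d.json (oldest first)
```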
def execute_job_queue(self):
"""Starts the queue processing loop."""
job_files = sorted(glob.glob(os.path.join(self.jobs_dir, "job_*.json")))

if not job_files:
QMessageBox.information(self, "Queue Empty", "No job files found in appdata/jobs.")
return

self.log_signal.emit("=" * 40)
self.log_signal.emit(f"🚀 Starting execution of {len(job_files)} queued jobs.")
self.is_running_job_queue = True
self.download_btn.setEnabled(False) # Disable button while running
self.add_queue_btn.setEnabled(False)

self._process_next_queued_job()

def _process_next_queued_job(self):
"""Loads the next job file and starts the download."""
if self.cancellation_event.is_set():
self.is_running_job_queue = False
self.log_signal.emit("🛑 Queue execution cancelled.")
self._update_button_states_and_connections()
return

job_files = sorted(glob.glob(os.path.join(self.jobs_dir, "job_*.json")))

if not job_files:
self.is_running_job_queue = False
self.current_job_file = None
self.log_signal.emit("🏁 All queued jobs finished!")
self.link_input.clear()
QMessageBox.information(self, "Queue Finished", "All queued jobs have been processed.")
self._update_button_states_and_connections()
return

next_job_path = job_files[0]
self.current_job_file = next_job_path

self.log_signal.emit(f"📂 Loading job: {os.path.basename(next_job_path)}")

try:
with open(next_job_path, 'r', encoding='utf-8') as f:
settings = json.load(f)

# --- Ensure Directory Exists ---
# The settings now contain the full path (e.g. E:/Kemono/ArtistName)
target_dir = settings.get('output_dir', '')
if target_dir:
try:
os.makedirs(target_dir, exist_ok=True)
except Exception as e:
self.log_signal.emit(f"⚠️ Warning: Could not pre-create directory '{target_dir}': {e}")
# -------------------------------

# Load settings into UI
self._load_ui_from_settings_dict(settings)
QCoreApplication.processEvents()

# Start download
self.start_download()

except Exception as e:
self.log_signal.emit(f"❌ Error loading/starting job '{next_job_path}': {e}")
failed_path = next_job_path + ".failed"
os.rename(next_job_path, failed_path)
self._process_next_queued_job()

def _run_discord_file_download_thread(self, session, server_id, channel_id, token, output_dir, message_limit=None):
"""
Runs in a background thread to fetch and download all files from a Discord channel.
@ -769,6 +947,23 @@ class DownloaderApp (QWidget ):

is_download_active = self._is_download_active()
fetch_first_enabled = self.settings.value(FETCH_FIRST_KEY, False, type=bool)
url_text = self.link_input.text().strip()

# --- NEW: Check for Queue Command ---
is_queue_command = (url_text.lower() == "start queue")

# --- NEW: Handle 'Add to Queue' Button State ---
if hasattr(self, 'add_queue_btn'):
# Only enable if not downloading, URL is valid, not in queue mode, and not in specialized fetch states
should_enable_queue = (
not is_download_active and
url_text != "" and
not is_queue_command and
not self.is_ready_to_download_fetched and
not self.is_ready_to_download_batch_update
)
self.add_queue_btn.setEnabled(should_enable_queue)

print(f"--- DEBUG: Updating buttons (is_download_active={is_download_active}) ---")

if self.is_ready_to_download_fetched:
@ -852,7 +1047,12 @@ class DownloaderApp (QWidget ):
self.download_btn.setText(f"⬇️ Start Download ({num_posts} Posts)")
self.download_btn.setEnabled(True) # Keep it enabled for the user to click
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
# Check if running queue to show specific text
if hasattr(self, 'is_running_job_queue') and self.is_running_job_queue:
self.download_btn.setText("🔄 Processing Queue...")
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))

self.download_btn.setEnabled(False)

self.pause_btn.setText(self._tr("resume_download_button_text", "▶️ Resume Download") if self.is_paused else self._tr("pause_download_button_text", "⏸️ Pause Download"))
@ -865,22 +1065,32 @@ class DownloaderApp (QWidget ):
self.cancel_btn.clicked.connect(self.cancel_download_button_action)

else:
url_text = self.link_input.text().strip()
_, _, post_id = extract_post_info(url_text)
is_single_post = bool(post_id)

if fetch_first_enabled and not is_single_post:
self.download_btn.setText("📄 Fetch Pages")
# --- IDLE STATE ---
if is_queue_command:
# --- NEW: Queue Execution Mode ---
self.download_btn.setText("🚀 Execute Queue")
self.download_btn.setEnabled(True)
# Ensure the method exists before connecting
if hasattr(self, 'execute_job_queue'):
self.download_btn.clicked.connect(self.execute_job_queue)
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
_, _, post_id = extract_post_info(url_text)
is_single_post = bool(post_id)

if fetch_first_enabled and not is_single_post and url_text:
self.download_btn.setText("📄 Fetch Pages")
else:
self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))

self.download_btn.setEnabled(True)
self.download_btn.clicked.connect(self.start_download)

self.download_btn.setEnabled(True)
self.download_btn.clicked.connect(self.start_download)
self.pause_btn.setText(self._tr("pause_download_button_text", "⏸️ Pause Download"))
self.pause_btn.setEnabled(False)
self.cancel_btn.setText(self._tr("cancel_button_text", "❌ Cancel & Reset UI"))
self.cancel_btn.setEnabled(False)

def _run_fetch_only_thread(self, fetch_args):
"""
Runs in a background thread to ONLY fetch all posts without downloading.
@ -5743,6 +5953,14 @@ class DownloaderApp (QWidget ):

if cancelled_by_user:
self.log_signal.emit("✅ Cancellation complete. Resetting UI.")

# --- NEW: Reset Queue State on Cancel ---
if getattr(self, 'is_running_job_queue', False):
self.log_signal.emit("🛑 Queue execution stopped by user.")
self.is_running_job_queue = False
self.current_job_file = None
# ----------------------------------------

self._clear_session_file()
self.interrupted_session_data = None
self.is_restore_pending = False
@ -5757,7 +5975,7 @@ class DownloaderApp (QWidget ):

self.log_signal.emit("🏁 Download of current item complete.")

# --- QUEUE PROCESSING BLOCK ---
# --- EXISTING: FAVORITE QUEUE PROCESSING BLOCK ---
if self.is_processing_favorites_queue and self.favorite_download_queue:
self.log_signal.emit("✅ Item finished. Processing next in queue...")
if self.download_thread and isinstance(self.download_thread, QThread):
@ -5773,6 +5991,39 @@ class DownloaderApp (QWidget ):
return
# ---------------------------------------------------------

# --- NEW: JOB QUEUE CONTINUATION LOGIC ---
# Checks if we are in 'Execute Queue' mode and have a current job file active
if getattr(self, 'is_running_job_queue', False) and getattr(self, 'current_job_file', None):
self.log_signal.emit(f"✅ Job finished. Deleting job file: {os.path.basename(self.current_job_file)}")

# 1. Clean up resources for this specific run
self._finalize_download_history()
if self.thread_pool:
self.thread_pool.shutdown(wait=False)
self.thread_pool = None
self._cleanup_temp_files()
self.single_pdf_setting = False # Reset per job

# 2. Delete the finished job file so it isn't run again
try:
if os.path.exists(self.current_job_file):
os.remove(self.current_job_file)
except Exception as e:
self.log_signal.emit(f"⚠️ Failed to delete finished job file: {e}")

# 3. Reset state for next job
self.current_job_file = None
self.is_finishing = False

# 4. Release lock
self.finish_lock.release()
lock_held = False

# 5. Trigger next job in queue (using QTimer to allow stack to unwind)
QTimer.singleShot(100, self._process_next_queued_job)
return
# -----------------------------------------
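Note: step 5 above uses `QTimer.singleShot(100, ...)` so the next job is started from the Qt event loop rather than from inside the current finish handler, letting the call stack unwind and the UI repaint before the next download begins. A minimal sketch of the pattern (illustrative only; the import assumes a PyQt-style binding):

```python
from PyQt5.QtCore import QTimer  # assumption: adjust to the binding the project uses

def finish_current_job(self):
    # ... clean up the finished job here ...
    # Defer the next job to the event loop instead of calling it directly,
    # which would recurse and keep the finish handler on the stack.
    QTimer.singleShot(100, self._process_next_queued_job)
```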

if self.is_processing_favorites_queue:
self.is_processing_favorites_queue = False
self.log_signal.emit("✅ All items from the download queue have been processed.")
@ -5888,12 +6139,21 @@ class DownloaderApp (QWidget ):

# Reset the finishing lock and exit to let the retry session take over
self.is_finishing = False

# Release lock here as we are returning
self.finish_lock.release()
lock_held = False
return

self.is_fetcher_thread_running = False

# --- POST DOWNLOAD ACTION (Only if queue is finished or not running queue) ---
if not cancelled_by_user and not self.is_processing_favorites_queue:
self._execute_post_download_action()
# If we were running a job queue, we only do this when the queue is EMPTY (handled by _process_next_queued_job)
# But since we return early for job queue continuation above, getting here means
# we are either in a standard download OR the job queue has finished/was cancelled.
if not getattr(self, 'is_running_job_queue', False):
self._execute_post_download_action()

self.set_ui_enabled(True)
self._update_button_states_and_connections()

@ -347,7 +347,6 @@ def setup_ui(main_app):
left_layout.addLayout(checkboxes_group_layout)

# --- Action Buttons & Remaining UI ---
# ... (The rest of the setup_ui function remains unchanged)
main_app.standard_action_buttons_widget = QWidget()
btn_layout = QHBoxLayout(main_app.standard_action_buttons_widget)
btn_layout.setContentsMargins(0, 10, 0, 0)
@ -357,6 +356,11 @@ def setup_ui(main_app):
font.setBold(True)
main_app.download_btn.setFont(font)
main_app.download_btn.clicked.connect(main_app.start_download)

main_app.add_queue_btn = QPushButton("➕ Add to Queue")
main_app.add_queue_btn.setToolTip("Save current settings as a job for later execution.")
main_app.add_queue_btn.clicked.connect(main_app.add_current_settings_to_queue)

main_app.pause_btn = QPushButton("⏸️ Pause Download")
main_app.pause_btn.setEnabled(False)
main_app.pause_btn.clicked.connect(main_app._handle_pause_resume_action)
@ -367,6 +371,7 @@ def setup_ui(main_app):
main_app.error_btn.setToolTip("View files skipped due to errors and optionally retry them.")
main_app.error_btn.setEnabled(True)
btn_layout.addWidget(main_app.download_btn)
btn_layout.addWidget(main_app.add_queue_btn)
btn_layout.addWidget(main_app.pause_btn)
btn_layout.addWidget(main_app.cancel_btn)
btn_layout.addWidget(main_app.error_btn)