Yuvi63771 2025-11-04 09:13:54 +05:30
parent 257111d462
commit 4a93b721e2
4 changed files with 643 additions and 80 deletions

View File

@@ -159,8 +159,6 @@ def download_from_api(
    if cancellation_event and cancellation_event.is_set():
        logger(" Download_from_api cancelled at start.")
        return
    # The code that defined api_domain was moved from here to the top of the function
    if not any(d in api_domain.lower() for d in ['kemono.su', 'kemono.party', 'kemono.cr', 'coomer.su', 'coomer.party', 'coomer.st']):
        logger(f"⚠️ Unrecognized domain '{api_domain}' from input URL. Defaulting to kemono.su for API calls.")
@@ -312,6 +310,8 @@ def download_from_api(
    current_offset = (start_page - 1) * page_size
    current_page_num = start_page
    logger(f" Starting from page {current_page_num} (calculated offset {current_offset}).")
+    # --- START OF MODIFIED BLOCK ---
    while True:
        if pause_event and pause_event.is_set():
            logger(" Post fetching loop paused...")
@@ -321,18 +321,23 @@ def download_from_api(
                    break
                time.sleep(0.5)
            if not (cancellation_event and cancellation_event.is_set()): logger(" Post fetching loop resumed.")
        if cancellation_event and cancellation_event.is_set():
            logger(" Post fetching loop cancelled.")
            break
        if target_post_id and processed_target_post_flag:
            break
        if not target_post_id and end_page and current_page_num > end_page:
            logger(f"✅ Reached specified end page ({end_page}) for creator feed. Stopping.")
            break
        try:
-            posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
-            if not isinstance(posts_batch, list):
-                logger(f"❌ API Error: Expected list of posts, got {type(posts_batch)} at page {current_page_num} (offset {current_offset}).")
+            # 1. Fetch the raw batch of posts
+            raw_posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
+            if not isinstance(raw_posts_batch, list):
+                logger(f"❌ API Error: Expected list of posts, got {type(raw_posts_batch)} at page {current_page_num} (offset {current_offset}).")
                break
        except RuntimeError as e:
            if "cancelled by user" in str(e).lower():
@@ -344,14 +349,9 @@ def download_from_api(
            logger(f"❌ Unexpected error fetching page {current_page_num} (offset {current_offset}): {e}")
            traceback.print_exc()
            break
-        if processed_post_ids:
-            original_count = len(posts_batch)
-            posts_batch = [post for post in posts_batch if post.get('id') not in processed_post_ids]
-            skipped_count = original_count - len(posts_batch)
-            if skipped_count > 0:
-                logger(f" Skipped {skipped_count} already processed post(s) from page {current_page_num}.")
-        if not posts_batch:
+        # 2. Check if the *raw* batch from the API was empty. This is the correct "end" condition.
+        if not raw_posts_batch:
            if target_post_id and not processed_target_post_flag:
                logger(f"❌ Target post {target_post_id} not found after checking all available pages (API returned no more posts at offset {current_offset}).")
            elif not target_post_id:
@@ -359,20 +359,45 @@ def download_from_api(
                    logger(f"😕 No posts found on the first page checked (page {current_page_num}, offset {current_offset}).")
                else:
                    logger(f"✅ Reached end of posts (no more content from API at offset {current_offset}).")
-            break
+            break  # This break is now correct.

+        # 3. Filter the batch against processed IDs
+        posts_batch_to_yield = raw_posts_batch
+        original_count = len(raw_posts_batch)
+        if processed_post_ids:
+            posts_batch_to_yield = [post for post in raw_posts_batch if post.get('id') not in processed_post_ids]
+            skipped_count = original_count - len(posts_batch_to_yield)
+            if skipped_count > 0:
+                logger(f" Skipped {skipped_count} already processed post(s) from page {current_page_num}.")

+        # 4. Process the *filtered* batch
        if target_post_id and not processed_target_post_flag:
-            matching_post = next((p for p in posts_batch if str(p.get('id')) == str(target_post_id)), None)
+            # Still searching for a specific post
+            matching_post = next((p for p in posts_batch_to_yield if str(p.get('id')) == str(target_post_id)), None)
            if matching_post:
                logger(f"🎯 Found target post {target_post_id} on page {current_page_num} (offset {current_offset}).")
                yield [matching_post]
                processed_target_post_flag = True
        elif not target_post_id:
-            yield posts_batch
+            # Downloading a creator feed
+            if posts_batch_to_yield:
+                # We found new posts on this page, yield them
+                yield posts_batch_to_yield
+            elif original_count > 0:
+                # We found 0 new posts, but the page *did* have posts (they were just skipped).
+                # Log this and continue to the next page.
+                logger(f" No new posts found on page {current_page_num}. Checking next page...")
+            # If original_count was 0, the `if not raw_posts_batch:` check
+            # already caught it and broke the loop.
        if processed_target_post_flag:
            break
        current_offset += page_size
        current_page_num += 1
        time.sleep(0.6)
+    # --- END OF MODIFIED BLOCK ---

    if target_post_id and not processed_target_post_flag and not (cancellation_event and cancellation_event.is_set()):
        logger(f"❌ Target post {target_post_id} could not be found after checking all relevant pages (final check after loop).")

View File

@@ -22,6 +22,8 @@ from ..main_window import get_app_icon_object
from ...core.api_client import download_from_api
from ...utils.network_utils import extract_post_info, prepare_cookies_for_request
from ...utils.resolution import get_dark_theme
+# --- IMPORT THE NEW DIALOG ---
+from .UpdateCheckDialog import UpdateCheckDialog

class PostsFetcherThread (QThread ):
@@ -151,8 +153,13 @@ class EmptyPopupDialog (QDialog ):
        app_icon =get_app_icon_object ()
        if app_icon and not app_icon .isNull ():
            self .setWindowIcon (app_icon )
+        # --- MODIFIED: Store a list of profiles now ---
+        self.update_profiles_list = None
+        # --- DEPRECATED (kept for compatibility if needed, but new logic won't use them) ---
        self.update_profile_data = None
        self.update_creator_name = None
        self .selected_creators_for_queue =[]
        self .globally_selected_creators ={}
        self .fetched_posts_data ={}
@@ -321,29 +328,34 @@ class EmptyPopupDialog (QDialog ):
        pass

    def _handle_update_check(self):
-        """Opens a dialog to select a creator profile and loads it for an update session."""
-        appdata_dir = os.path.join(self.app_base_dir, "appdata")
-        profiles_dir = os.path.join(appdata_dir, "creator_profiles")
-        if not os.path.isdir(profiles_dir):
-            QMessageBox.warning(self, "Directory Not Found", f"The creator profiles directory does not exist yet.\n\nPath: {profiles_dir}")
-            return
-        filepath, _ = QFileDialog.getOpenFileName(self, "Select Creator Profile for Update", profiles_dir, "JSON Files (*.json)")
-        if filepath:
-            try:
-                with open(filepath, 'r', encoding='utf-8') as f:
-                    data = json.load(f)
-                if 'creator_url' not in data or 'processed_post_ids' not in data:
-                    raise ValueError("Invalid profile format.")
-                self.update_profile_data = data
-                self.update_creator_name = os.path.basename(filepath).replace('.json', '')
-                self.accept()  # Close the dialog and signal success
-            except Exception as e:
-                QMessageBox.critical(self, "Error Loading Profile", f"Could not load or parse the selected profile file:\n\n{e}")
+        """
+        --- MODIFIED FUNCTION ---
+        Opens the new UpdateCheckDialog instead of a QFileDialog.
+        If a profile is selected, it sets the dialog's result properties
+        and accepts the dialog, just like the old file dialog logic did.
+        """
+        # --- NEW BEHAVIOR ---
+        # Pass the app_base_dir and a reference to the main app (for translations/theme)
+        dialog = UpdateCheckDialog(self.app_base_dir, self.parent_app, self)
+        if dialog.exec_() == QDialog.Accepted:
+            # --- MODIFIED: Get a list of profiles now ---
+            selected_profiles = dialog.get_selected_profiles()
+            if selected_profiles:
+                try:
+                    # --- MODIFIED: Store the list ---
+                    self.update_profiles_list = selected_profiles
+                    # --- Set deprecated single-profile fields for backward compatibility (optional) ---
+                    # --- This helps if other parts of the main window still expect one profile ---
+                    self.update_profile_data = selected_profiles[0]['data']
+                    self.update_creator_name = selected_profiles[0]['name']
+                    self.accept()  # Close EmptyPopupDialog and signal success to main_window
+                except Exception as e:
+                    QMessageBox.critical(self, "Error Loading Profile",
+                                         f"Could not process the selected profile data:\n\n{e}")
+        # --- END OF NEW BEHAVIOR ---

    def _handle_fetch_posts_click (self ):
        selected_creators =list (self .globally_selected_creators .values ())
@@ -981,9 +993,14 @@ class EmptyPopupDialog (QDialog ):
    def _handle_posts_close_view (self ):
        self .right_pane_widget .hide ()
        self .main_splitter .setSizes ([self .width (),0 ])
-        self .posts_list_widget .itemChanged .disconnect (self ._handle_post_item_check_changed )
+        # --- MODIFIED: Added check before disconnect ---
        if hasattr (self ,'_handle_post_item_check_changed'):
-            self .posts_title_list_widget .itemChanged .disconnect (self ._handle_post_item_check_changed )
+            try:
+                self .posts_title_list_widget .itemChanged .disconnect (self ._handle_post_item_check_changed )
+            except TypeError:
+                pass  # Already disconnected
        self .posts_search_input .setVisible (False )
        self .posts_search_input .clear ()
        self .globally_selected_post_ids .clear ()
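Disconnecting a Qt signal that is not currently connected raises TypeError in PyQt5, which is what the try/except above guards against when the view is closed more than once. A small reusable helper in the same spirit (a sketch, not part of this commit):

def safe_disconnect(signal, slot):
    """Disconnect slot from signal; return False if it was not connected."""
    try:
        signal.disconnect(slot)
        return True
    except TypeError:
        # PyQt5 raises TypeError when no such connection exists
        return False

# Usage mirroring the patched method:
#   safe_disconnect(self.posts_title_list_widget.itemChanged, self._handle_post_item_check_changed)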

View File

@@ -0,0 +1,179 @@
+# --- Standard Library Imports ---
+import json
+import os
+import sys
+
+# --- PyQt5 Imports ---
+from PyQt5.QtCore import Qt, pyqtSignal
+from PyQt5.QtWidgets import (
+    QDialog, QVBoxLayout, QHBoxLayout, QListWidget, QListWidgetItem,
+    QPushButton, QMessageBox, QAbstractItemView, QLabel
+)
+
+# --- Local Application Imports ---
+from ...i18n.translator import get_translation
+from ..main_window import get_app_icon_object
+from ...utils.resolution import get_dark_theme
+
+
+class UpdateCheckDialog(QDialog):
+    """
+    A dialog that lists all creator .json profiles with checkboxes
+    and allows the user to select multiple to check for updates.
+    """
+
+    def __init__(self, app_base_dir, parent_app_ref, parent=None):
+        super().__init__(parent)
+        self.parent_app = parent_app_ref
+        self.app_base_dir = app_base_dir
+        self.selected_profiles_list = []  # Will store a list of {'name': ..., 'data': ...}
+
+        self._init_ui()
+        self._load_profiles()
+        self._retranslate_ui()
+
+        # Apply theme from parent
+        if self.parent_app and self.parent_app.current_theme == "dark":
+            scale = getattr(self.parent_app, 'scale_factor', 1)
+            self.setStyleSheet(get_dark_theme(scale))
+        else:
+            self.setStyleSheet("")
+
+    def _init_ui(self):
+        """Initializes the UI components."""
+        self.setWindowTitle("Check for Updates")
+        self.setMinimumSize(400, 450)
+
+        app_icon = get_app_icon_object()
+        if app_icon and not app_icon.isNull():
+            self.setWindowIcon(app_icon)
+
+        layout = QVBoxLayout(self)
+
+        self.info_label = QLabel("Select creator profiles to check for updates:")
+        layout.addWidget(self.info_label)
+
+        # --- List Widget with Checkboxes ---
+        self.list_widget = QListWidget()
+        # No selection mode, we only care about checkboxes
+        self.list_widget.setSelectionMode(QAbstractItemView.NoSelection)
+        layout.addWidget(self.list_widget)
+
+        # --- All Buttons in One Horizontal Layout ---
+        button_layout = QHBoxLayout()
+        button_layout.setSpacing(6)  # small even spacing between all buttons
+
+        self.select_all_button = QPushButton("Select All")
+        self.select_all_button.clicked.connect(self._toggle_all_checkboxes)
+
+        self.deselect_all_button = QPushButton("Deselect All")
+        self.deselect_all_button.clicked.connect(self._toggle_all_checkboxes)
+
+        self.close_button = QPushButton("Close")
+        self.close_button.clicked.connect(self.reject)
+
+        self.check_button = QPushButton("Check Selected")
+        self.check_button.clicked.connect(self.on_check_selected)
+        self.check_button.setDefault(True)
+
+        # Add buttons without a stretch (so no large gap)
+        button_layout.addWidget(self.select_all_button)
+        button_layout.addWidget(self.deselect_all_button)
+        button_layout.addWidget(self.close_button)
+        button_layout.addWidget(self.check_button)
+
+        layout.addLayout(button_layout)
+
+    def _tr(self, key, default_text=""):
+        """Helper to get translation based on current app language."""
+        if callable(get_translation) and self.parent_app:
+            return get_translation(self.parent_app.current_selected_language, key, default_text)
+        return default_text
+
+    def _retranslate_ui(self):
+        """Translates the UI elements."""
+        self.setWindowTitle(self._tr("update_check_dialog_title", "Check for Updates"))
+        self.info_label.setText(self._tr("update_check_dialog_info_multiple", "Select creator profiles to check for updates:"))
+        self.select_all_button.setText(self._tr("select_all_button_text", "Select All"))
+        self.deselect_all_button.setText(self._tr("deselect_all_button_text", "Deselect All"))
+        self.check_button.setText(self._tr("update_check_dialog_check_button", "Check Selected"))
+        self.close_button.setText(self._tr("update_check_dialog_close_button", "Close"))
+
+    def _load_profiles(self):
+        """Loads all .json files from the creator_profiles directory as checkable items."""
+        appdata_dir = os.path.join(self.app_base_dir, "appdata")
+        profiles_dir = os.path.join(appdata_dir, "creator_profiles")
+
+        if not os.path.isdir(profiles_dir):
+            QMessageBox.warning(self,
+                                self._tr("update_check_dir_not_found_title", "Directory Not Found"),
+                                self._tr("update_check_dir_not_found_msg",
+                                         "The creator profiles directory does not exist yet.\n\nPath: {path}")
+                                .format(path=profiles_dir))
+            return
+
+        profiles_found = []
+        for filename in os.listdir(profiles_dir):
+            if filename.endswith(".json"):
+                filepath = os.path.join(profiles_dir, filename)
+                try:
+                    with open(filepath, 'r', encoding='utf-8') as f:
+                        data = json.load(f)
+                    # Basic validation to ensure it's a valid profile
+                    if 'creator_url' in data and 'processed_post_ids' in data:
+                        creator_name = os.path.splitext(filename)[0]
+                        profiles_found.append({'name': creator_name, 'data': data})
+                    else:
+                        print(f"Skipping invalid profile: {filename}")
+                except Exception as e:
+                    print(f"Failed to load profile {filename}: {e}")
+
+        profiles_found.sort(key=lambda x: x['name'].lower())
+
+        for profile_info in profiles_found:
+            item = QListWidgetItem(profile_info['name'])
+            item.setData(Qt.UserRole, profile_info)
+            # --- Make item checkable ---
+            item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
+            item.setCheckState(Qt.Unchecked)
+            self.list_widget.addItem(item)
+
+        if not profiles_found:
+            self.list_widget.addItem(self._tr("update_check_no_profiles", "No creator profiles found."))
+            self.list_widget.setEnabled(False)
+            self.check_button.setEnabled(False)
+            self.select_all_button.setEnabled(False)
+            self.deselect_all_button.setEnabled(False)
+
+    def _toggle_all_checkboxes(self):
+        """Handles Select All and Deselect All button clicks."""
+        sender = self.sender()
+        check_state = Qt.Checked if sender == self.select_all_button else Qt.Unchecked
+        for i in range(self.list_widget.count()):
+            item = self.list_widget.item(i)
+            if item.flags() & Qt.ItemIsUserCheckable:
+                item.setCheckState(check_state)
+
+    def on_check_selected(self):
+        """Handles the 'Check Selected' button click."""
+        self.selected_profiles_list = []
+        for i in range(self.list_widget.count()):
+            item = self.list_widget.item(i)
+            if item.checkState() == Qt.Checked:
+                profile_info = item.data(Qt.UserRole)
+                if profile_info:
+                    self.selected_profiles_list.append(profile_info)
+
+        if not self.selected_profiles_list:
+            QMessageBox.warning(self,
+                                self._tr("update_check_no_selection_title", "No Selection"),
+                                self._tr("update_check_no_selection_msg", "Please select at least one creator to check."))
+            return
+
+        self.accept()
+
+    def get_selected_profiles(self):
+        """Returns the list of profile data selected by the user."""
+        return self.selected_profiles_list
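For context on how the new dialog is meant to be driven: exec_() only returns Accepted after on_check_selected() has verified that at least one profile is checked, so a caller can read get_selected_profiles() without further validation. A minimal caller sketch (app_base_dir and parent_app stand in for the real main-window objects):

# Assumes the imports from UpdateCheckDialog above (QDialog comes from PyQt5.QtWidgets).
dialog = UpdateCheckDialog(app_base_dir, parent_app, parent=None)
if dialog.exec_() == QDialog.Accepted:
    for profile in dialog.get_selected_profiles():
        # Each entry is {'name': <file stem>, 'data': <parsed profile JSON>}
        print(profile['name'], len(profile['data'].get('processed_post_ids', [])))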

View File

@@ -149,6 +149,7 @@ class DownloaderApp (QWidget ):
    external_link_signal =pyqtSignal (str ,str ,str ,str ,str )
    file_progress_signal =pyqtSignal (str ,object )
    fetch_only_complete_signal = pyqtSignal(list)
+    batch_update_check_complete_signal = pyqtSignal(list)

    def __init__(self):
@@ -156,6 +157,10 @@ class DownloaderApp (QWidget ):
        self.settings = QSettings(CONFIG_ORGANIZATION_NAME, CONFIG_APP_NAME_MAIN)
        self.active_update_profile = None
        self.new_posts_for_update = []
+        self.active_update_profiles_list = []  # For batch updates
+        self.fetched_posts_for_batch_update = []  # Stores {'post_data': ..., 'creator_settings': ...}
+        self.is_ready_to_download_batch_update = False
        self.is_finishing = False
        self.finish_lock = threading.Lock()
@@ -334,7 +339,7 @@ class DownloaderApp (QWidget ):
        self.download_location_label_widget = None
        self.remove_from_filename_label_widget = None
        self.skip_words_label_widget = None
-        self.setWindowTitle("Kemono Downloader v7.5.2")
+        self.setWindowTitle("Kemono Downloader v7.6.0")
        setup_ui(self)
        self._connect_signals()
        if hasattr(self, 'character_input'):
@@ -775,6 +780,17 @@ class DownloaderApp (QWidget ):
            self.cancel_btn.clicked.connect(self.reset_application_state)
            return  # <-- This 'return' is CRITICAL
+        elif self.is_ready_to_download_batch_update:
+            num_posts = len(self.fetched_posts_for_batch_update)
+            self.download_btn.setText(f"⬇️ Start Download ({num_posts} New Posts)")
+            self.download_btn.setEnabled(True)
+            self.download_btn.clicked.connect(self.start_download)
+            self.pause_btn.setEnabled(False)
+            self.cancel_btn.setText("🗑️ Clear Update")
+            self.cancel_btn.setEnabled(True)
+            self.cancel_btn.clicked.connect(self.reset_application_state)
+            return
        if self.active_update_profile and self.new_posts_for_update and not is_download_active:
            # State: Update confirmation (new posts found, waiting for user to start)
            num_new = len(self.new_posts_for_update)
@@ -1130,6 +1146,7 @@ class DownloaderApp (QWidget ):
        self .actual_gui_signals .file_successfully_downloaded_signal .connect (self ._handle_actual_file_downloaded )
        self.actual_gui_signals.worker_finished_signal.connect(self._handle_worker_result)
        self .actual_gui_signals .file_download_status_signal .connect (lambda status :None )
+        self.batch_update_check_complete_signal.connect(self._batch_update_check_finished)
        self.fetch_only_complete_signal.connect(self._fetch_only_finished)
        if hasattr (self ,'character_input'):
@@ -1795,7 +1812,9 @@ class DownloaderApp (QWidget ):
        supported_platforms_for_button ={'mega','google drive','dropbox'}
        has_supported_links =any (
            link_info [3 ].lower ()in supported_platforms_for_button for link_info in self .extracted_links_cache
+            for link_info in self.extracted_links_cache
        )
        self .download_extracted_links_button .setEnabled (is_only_links and has_supported_links )
@@ -3515,6 +3534,18 @@ class DownloaderApp (QWidget ):
        return get_theme_stylesheet(actual_scale)

    def start_download(self, direct_api_url=None, override_output_dir=None, is_restore=False, is_continuation=False, item_type_from_queue=None):
+        if not is_restore and not is_continuation:
+            if self.main_log_output: self.main_log_output.clear()
+            if self.external_log_output: self.external_log_output.clear()
+            if self.missed_character_log_output: self.missed_character_log_output.clear()
+            self.missed_key_terms_buffer.clear()
+            self.already_logged_bold_key_terms.clear()
+
+        if self.is_ready_to_download_batch_update:
+            self._start_download_of_batch_update()
+            return True
+
        if not direct_api_url:
            api_url_text = self.link_input.text().strip().lower()
            batch_handlers = {
@@ -4411,6 +4442,7 @@ class DownloaderApp (QWidget ):
        if self.pause_event: self.pause_event.clear()
        self.is_paused = False
        return True
+
    def restore_download(self):
        """Initiates the download restoration process."""
        if self._is_download_active():
@@ -4574,6 +4606,294 @@ class DownloaderApp (QWidget ):
            self .log_signal .emit (f" {len (list_of_permanent_failure_details )} file(s) from single-thread download marked as permanently failed for this session.")
        self._update_error_button_count()

+    def _start_batch_update_check(self, profiles_list):
+        """Launches a background thread to check multiple profiles for updates."""
+        self.set_ui_enabled(False)
+        self.progress_label.setText(self._tr("batch_update_checking", "Checking for updates..."))
+        self.cancellation_event.clear()
+
+        # Start the background thread
+        self.download_thread = threading.Thread(
+            target=self._run_batch_update_check_thread,
+            args=(profiles_list,),
+            daemon=True
+        )
+        self.download_thread.start()
+        self._update_button_states_and_connections()
+
+    def _run_batch_update_check_thread(self, profiles_list):
+        """
+        (BACKGROUND THREAD)
+        Iterates profiles, calls download_from_api for each, and collects new posts.
+        """
+        master_new_post_list = []
+        total_profiles = len(profiles_list)
+
+        for i, profile in enumerate(profiles_list):
+            if self.cancellation_event.is_set():
+                break
+
+            profile_name = profile.get('name', 'Unknown')
+            self.log_signal.emit(f"Checking {profile_name} ({i+1}/{total_profiles})...")
+
+            try:
+                profile_data = profile.get('data', {})
+                url = profile_data.get('creator_url', [])[0]  # Get first URL
+                processed_ids = set(profile_data.get('processed_post_ids', []))
+                creator_settings = profile_data.get('settings', {})
+
+                # Use common cookie settings from the UI
+                use_cookie = self.use_cookie_checkbox.isChecked()
+                cookie_text = self.cookie_text_input.text()
+                cookie_file = self.selected_cookie_filepath
+
+                post_generator = download_from_api(
+                    api_url_input=url,
+                    logger=lambda msg: None,  # Suppress logs
+                    cancellation_event=self.cancellation_event,
+                    pause_event=self.pause_event,
+                    use_cookie=use_cookie,
+                    cookie_text=cookie_text,
+                    selected_cookie_file=cookie_file,
+                    app_base_dir=self.app_base_dir,
+                    processed_post_ids=processed_ids,
+                    end_page=5
+                )
+
+                for post_batch in post_generator:
+                    if self.cancellation_event.is_set(): break
+                    for post_data in post_batch:
+                        # Store the post AND the ENTIRE profile data
+                        master_new_post_list.append({
+                            'post_data': post_data,
+                            'profile_data': profile_data,  # Pass the full profile
+                            'creator_name': profile_name
+                        })
+            except Exception as e:
+                self.log_signal.emit(f"❌ Error checking {profile_name}: {e}")
+
+        # Emit the final aggregated list
+        self.batch_update_check_complete_signal.emit(master_new_post_list)
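The method above follows the usual PyQt pattern for long-running work: do the blocking loop in a plain Python thread, never touch widgets from it, and hand the aggregated result back through a signal, since a signal emitted from a non-GUI thread is delivered to its slot on the GUI thread through a queued connection. A stripped-down sketch of that handshake, with hypothetical names:

import threading
from PyQt5.QtCore import QObject, pyqtSignal

class UpdateChecker(QObject):
    finished = pyqtSignal(list)          # carries the aggregated new-post list

    def start(self, profiles):
        threading.Thread(target=self._work, args=(profiles,), daemon=True).start()

    def _work(self, profiles):           # background thread: no widget access here
        results = [p['name'] for p in profiles]   # placeholder for the real API calls
        # With a Qt event loop running, connected GUI-thread slots receive this
        # via a queued connection, so UI updates happen on the right thread.
        self.finished.emit(results)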
+    def _batch_update_check_finished(self, all_new_posts_list):
+        """
+        (GUI THREAD)
+        Called when the batch update check is complete. Updates UI.
+        """
+        self.download_thread = None  # Clear the thread
+
+        if self.cancellation_event.is_set():
+            self.log_signal.emit(" Update check was cancelled.")
+            self.reset_application_state()  # Full reset
+            return
+
+        if not all_new_posts_list:
+            self.log_signal.emit("✅ All selected creators are up to date! No new posts found.")
+            QMessageBox.information(self, "Up to Date", "No new posts were found for the selected creators.")
+            self.reset_application_state()  # Full reset
+            return
+
+        total_posts = len(all_new_posts_list)
+
+        # --- MODIFIED BLOCK ---
+        # Get the set of unique creator names who have new posts
+        creators_with_new_posts = sorted(list(set(p['creator_name'] for p in all_new_posts_list)))
+        total_creators = len(creators_with_new_posts)
+
+        self.log_signal.emit("=" * 40)
+        # Add the new line you requested
+        if creators_with_new_posts:
+            self.log_signal.emit(f"Creators With New Posts - {', '.join(creators_with_new_posts)}")
+        # Log the original summary line
+        self.log_signal.emit(f"✅ Update check complete. Found {total_posts} new post(s) across {total_creators} creator(s).")
+        # --- END OF MODIFIED BLOCK ---
+        self.log_signal.emit(" Click 'Start Download' to begin.")
+
+        self.fetched_posts_for_batch_update = all_new_posts_list
+        self.is_ready_to_download_batch_update = True
+
+        self.progress_label.setText(f"Found {total_posts} new posts. Ready to download.")
+        self.set_ui_enabled(True)  # Re-enable UI
+        self._update_button_states_and_connections()  # Update buttons to "Start Download (X)"
+    def _start_download_of_batch_update(self):
+        """
+        (GUI THREAD)
+        Initiates the download of the posts found during the batch update check.
+        --- THIS IS THE CORRECTED ROBUST VERSION ---
+        """
+        self.is_ready_to_download_batch_update = False
+
+        self.log_signal.emit("=" * 40)
+        self.log_signal.emit(f"🚀 Starting batch download for {len(self.fetched_posts_for_batch_update)} new post(s)...")
+
+        if self.main_log_output: self.main_log_output.clear()
+        if self.external_log_output: self.external_log_output.clear()
+        if self.missed_character_log_output: self.missed_character_log_output.clear()
+        self.missed_key_terms_buffer.clear()
+        self.already_logged_bold_key_terms.clear()
+
+        self.set_ui_enabled(False)
+
+        num_threads = int(self.thread_count_input.text()) if self.use_multithreading_checkbox.isChecked() else 1
+        self.thread_pool = ThreadPoolExecutor(max_workers=num_threads, thread_name_prefix='PostWorker_')
+
+        self.total_posts_to_process = len(self.fetched_posts_for_batch_update)
+        self.processed_posts_count = 0
+        self.overall_progress_signal.emit(self.total_posts_to_process, 0)
+
+        ppw_expected_keys = list(PostProcessorWorker.__init__.__code__.co_varnames)[1:]
+
+        # 1. Define all LIVE RUNTIME arguments.
+        # These are taken from the current app state and are the same for all workers.
+        live_runtime_args = {
+            'emitter': self.worker_to_gui_queue,
+            'creator_name_cache': self.creator_name_cache,
+            'known_names': list(KNOWN_NAMES),
+            'unwanted_keywords': FOLDER_NAME_STOP_WORDS,
+            'pause_event': self.pause_event,
+            'cancellation_event': self.cancellation_event,
+            'downloaded_files': self.downloaded_files,
+            'downloaded_files_lock': self.downloaded_files_lock,
+            'downloaded_file_hashes': self.downloaded_file_hashes,
+            'downloaded_file_hashes_lock': self.downloaded_file_hashes_lock,
+            'dynamic_character_filter_holder': self.dynamic_character_filter_holder,
+            'num_file_threads': 1,  # File threads per post worker
+            'manga_date_file_counter_ref': None,
+            'manga_global_file_counter_ref': None,
+            'creator_download_folder_ignore_words': CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS,
+            'downloaded_hash_counts': self.downloaded_hash_counts,
+            'downloaded_hash_counts_lock': self.downloaded_hash_counts_lock,
+            'skip_current_file_flag': None,
+            'session_file_path': self.session_file_path,
+            'session_lock': self.session_lock,
+            'project_root_dir': self.app_base_dir,
+            'app_base_dir': self.app_base_dir,
+            'start_offset': 0,
+            'fetch_first': False,
+            # Add live cookie settings
+            'use_cookie': self.use_cookie_checkbox.isChecked(),
+            'cookie_text': self.cookie_text_input.text(),
+            'selected_cookie_file': self.selected_cookie_filepath,
+        }
+
+        # 2. Define DEFAULTS for all settings that *should* be in the profile.
+        # These will be used if the profile is old and missing a key.
+        default_profile_settings = {
+            'output_dir': self.dir_input.text().strip(),  # Fallback to live UI
+            'api_url': '',
+            'character_filter_text': '',
+            'skip_words_text': '',
+            'remove_words_text': '',
+            'custom_folder_name': None,
+            'filter_mode': 'all',
+            'text_only_scope': None,
+            'text_export_format': 'txt',
+            'single_pdf_mode': False,
+            'skip_zip': False,
+            'use_subfolders': False,
+            'use_post_subfolders': False,
+            'compress_images': False,
+            'download_thumbnails': False,
+            'skip_words_scope': SKIP_SCOPE_FILES,
+            'char_filter_scope': CHAR_SCOPE_FILES,
+            'show_external_links': False,
+            'extract_links_only': False,
+            'manga_mode_active': False,
+            'manga_filename_style': STYLE_POST_TITLE,
+            'allow_multipart_download': False,
+            'manga_date_prefix': '',
+            'scan_content_for_images': False,
+            'use_date_prefix_for_subfolder': False,
+            'date_prefix_format': "YYYY-MM-DD {post}",
+            'keep_in_post_duplicates': False,
+            'keep_duplicates_mode': DUPLICATE_HANDLING_HASH,
+            'keep_duplicates_limit': 0,
+            'multipart_scope': 'both',
+            'multipart_parts_count': 4,
+            'multipart_min_size_mb': 100,
+            'manga_custom_filename_format': "{published} {title}",
+            'manga_custom_date_format': "YYYY-MM-DD",
+            'target_post_id_from_initial_url': None,
+            'override_output_dir': None,
+            'processed_post_ids': [],
+        }
+
+        for item in self.fetched_posts_for_batch_update:
+            post_data = item['post_data']
+            # --- THIS IS THE NEW, CORRECTED LOGIC ---
+            full_profile_data = item.get('profile_data', {})
+            saved_settings = full_profile_data.get('settings', {})
+            # --- END OF NEW LOGIC ---
+
+            # 3. Construct the final arguments for this specific worker
+            # Start with a full set of defaults
+            args_for_this_worker = default_profile_settings.copy()
+            # Overwrite with any settings saved in the profile
+            # This is where {"filter_mode": "video"} from Maplestar.json is applied
+            args_for_this_worker.update(saved_settings)
+            # Add all the live runtime arguments
+            args_for_this_worker.update(live_runtime_args)
+
+            # 4. Manually parse values from the constructed args
+            # Set post-specific data
+            args_for_this_worker['service'] = post_data.get('service')
+            args_for_this_worker['user_id'] = post_data.get('user')
+
+            # Set download_root (which worker expects) from output_dir
+            args_for_this_worker['download_root'] = args_for_this_worker.get('output_dir')
+
+            # Parse filters and commands
+            raw_filters = args_for_this_worker.get('character_filter_text', '')
+            parsed_filters, commands = self._parse_character_filters(raw_filters)
+            args_for_this_worker['filter_character_list'] = parsed_filters
+            args_for_this_worker['domain_override'] = commands.get('domain_override')
+            args_for_this_worker['archive_only_mode'] = commands.get('archive_only', False)
+            args_for_this_worker['sfp_threshold'] = commands.get('sfp_threshold')
+            args_for_this_worker['handle_unknown_mode'] = commands.get('handle_unknown', False)
+
+            # Parse skip words and skip size
+            skip_words_parts = [part.strip() for part in args_for_this_worker.get('skip_words_text', '').split(',') if part.strip()]
+            args_for_this_worker['skip_file_size_mb'] = None
+            args_for_this_worker['skip_words_list'] = []
+            size_pattern = re.compile(r'\[(\d+)\]')
+            for part in skip_words_parts:
+                match = size_pattern.fullmatch(part)
+                if match:
+                    args_for_this_worker['skip_file_size_mb'] = int(match.group(1))
+                else:
+                    args_for_this_worker['skip_words_list'].append(part.lower())
+
+            # Parse remove_from_filename_words_list
+            raw_remove_words = args_for_this_worker.get('remove_words_text', '')
+            args_for_this_worker['remove_from_filename_words_list'] = [word.strip() for word in raw_remove_words.split(',') if word.strip()]
+
+            # Ensure processed_post_ids is a list (from the *original* profile data)
+            args_for_this_worker['processed_post_ids'] = list(full_profile_data.get('processed_post_ids', []))
+
+            # Ensure api_url_input is set
+            args_for_this_worker['api_url_input'] = args_for_this_worker.get('api_url', '')
+
+            self._submit_post_to_worker_pool(
+                post_data,
+                args_for_this_worker,
+                1,  # File threads per worker (1 for sequential batch)
+                self.worker_to_gui_queue,
+                ppw_expected_keys,
+                {}
+            )
+
+        self.fetched_posts_for_batch_update = []
+        self.is_fetcher_thread_running = False
+        self._check_if_all_work_is_done()
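The per-worker argument assembly above is a three-layer dictionary merge (hard-coded defaults, then whatever the profile saved, then live runtime state), so later layers win, and the skip-words field mixes plain terms with a "[N]" size cap in megabytes. A small worked example of both rules (values are illustrative, not taken from a real profile):

import re

defaults = {'filter_mode': 'all', 'skip_zip': False, 'output_dir': '/downloads'}
saved_settings = {'filter_mode': 'video'}                 # e.g. loaded from a creator profile
live_runtime = {'use_cookie': True}

args = {**defaults, **saved_settings, **live_runtime}     # later dicts override earlier ones
assert args['filter_mode'] == 'video' and args['skip_zip'] is False

# Skip-words field: plain words become skip terms, "[100]" becomes a size cap in MB.
skip_words_text = "preview, [100], wip"
size_pattern = re.compile(r'\[(\d+)\]')
skip_words, skip_size_mb = [], None
for part in (p.strip() for p in skip_words_text.split(',') if p.strip()):
    m = size_pattern.fullmatch(part)
    if m:
        skip_size_mb = int(m.group(1))
    else:
        skip_words.append(part.lower())
print(skip_words, skip_size_mb)   # -> ['preview', 'wip'] 100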

    def _submit_post_to_worker_pool (self ,post_data_item ,worker_args_template ,num_file_dl_threads_for_each_worker ,emitter_for_worker ,ppw_expected_keys ,ppw_optional_keys_with_defaults ):
        """Helper to prepare and submit a single post processing task to the thread pool."""
        global PostProcessorWorker
@@ -5173,56 +5493,76 @@ class DownloaderApp (QWidget ):
        self ._filter_links_log ()

    def cancel_download_button_action(self):
+        """
+        Handles the user clicking the 'Cancel' button.
+        This version forcefully shuts down thread pools.
+        """
+        if not self._is_download_active() and not self.is_paused:
+            self.log_signal.emit(" Cancel button clicked, but no download is active.")
+            return
        if self.is_paused:
            self.log_signal.emit("❌ Cancellation requested while paused. Stopping all workers...")
-            if self._is_download_active() and hasattr(self.download_thread, 'cancel'):
-                self.progress_label.setText(self._tr("status_cancelling", "Cancelling... Please wait."))
-                self.download_thread.cancel()
        else:
-            # Fallback for other download types
-            self.cancellation_event.set()
-            # Update UI to "Cancelling" state
-            self.pause_btn.setEnabled(False)
-            self.cancel_btn.setEnabled(False)
-            if hasattr(self, 'reset_button'):
-                self.reset_button.setEnabled(False)
+            self.log_signal.emit("❌ Cancellation requested by user. Stopping all workers...")
        self.progress_label.setText(self._tr("status_cancelling", "Cancelling... Please wait."))
+        self.pause_btn.setEnabled(False)
+        self.cancel_btn.setEnabled(False)
+        if hasattr(self, 'reset_button'):
+            self.reset_button.setEnabled(False)

+        # 1. Set the master cancellation event
+        # This tells all workers to stop *cooperatively*
+        if not self.cancellation_event.is_set():
+            self.cancellation_event.set()

-        # Only call QThread-specific methods if the thread is a QThread
+        # 2. Forcefully shut down QThreads
        if self.download_thread and hasattr(self.download_thread, 'requestInterruption'):
            self.download_thread.requestInterruption()
            self.log_signal.emit(" Signaled single download thread to interrupt.")
-        if self.thread_pool:
-            self.log_signal.emit(" Signaling worker pool to cancel futures...")
        if self.external_link_download_thread and self.external_link_download_thread.isRunning():
            self.log_signal.emit(" Cancelling active External Link download thread...")
            self.external_link_download_thread.cancel()
+        # ... (add any other QThread .cancel() calls here if you have them) ...
        if isinstance(self.download_thread, NhentaiDownloadThread):
            self.log_signal.emit(" Signaling nhentai download thread to cancel.")
            self.download_thread.cancel()
        if isinstance(self.download_thread, BunkrDownloadThread):
            self.log_signal.emit(" Signaling Bunkr download thread to cancel.")
            self.download_thread.cancel()
        if isinstance(self.download_thread, Saint2DownloadThread):
            self.log_signal.emit(" Signaling Saint2 download thread to cancel.")
            self.download_thread.cancel()
        if isinstance(self.download_thread, EromeDownloadThread):
            self.log_signal.emit(" Signaling Erome download thread to cancel.")
            self.download_thread.cancel()
        if isinstance(self.download_thread, Hentai2readDownloadThread):
            self.log_signal.emit(" Signaling Hentai2Read download thread to cancel.")
            self.download_thread.cancel()

+        # 3. Forcefully shut down ThreadPoolExecutors
+        # This is the critical fix for batch/update downloads
+        if self.thread_pool:
+            self.log_signal.emit(" Signaling worker pool to shut down...")
+            # We use cancel_futures=True to actively stop pending tasks
+            self.thread_pool.shutdown(wait=False, cancel_futures=True)
+            self.thread_pool = None
+            self.active_futures = []
+            self.log_signal.emit(" Worker pool shutdown initiated.")
+        if hasattr(self, 'retry_thread_pool') and self.retry_thread_pool:
+            self.log_signal.emit(" Signaling retry worker pool to shut down...")
+            self.retry_thread_pool.shutdown(wait=False, cancel_futures=True)
+            self.retry_thread_pool = None
+            self.active_retry_futures = []
+            self.log_signal.emit(" Retry pool shutdown initiated.")

+        # 4. Manually trigger the 'finished' logic to reset the UI
+        # This is safe because we just shut down all the threads
+        self.download_finished(0, 0, True, [])
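executor.shutdown(wait=False, cancel_futures=True), available since Python 3.9, cancels tasks still waiting in the queue but cannot interrupt tasks that are already running, which is why the method also sets the cancellation event so in-flight workers can exit cooperatively. A small sketch of that two-pronged pattern, with hypothetical worker code:

import threading, time
from concurrent.futures import ThreadPoolExecutor

cancel_event = threading.Event()

def worker(i):
    for _ in range(100):
        if cancel_event.is_set():      # cooperative exit for tasks already running
            return f"task {i} cancelled"
        time.sleep(0.05)
    return f"task {i} done"

pool = ThreadPoolExecutor(max_workers=2)
futures = [pool.submit(worker, i) for i in range(10)]
time.sleep(0.1)

cancel_event.set()                                    # 1. stop running tasks cooperatively
pool.shutdown(wait=False, cancel_futures=True)        # 2. drop tasks still queued (Python 3.9+)
print(sum(f.cancelled() for f in futures), "queued tasks were cancelled outright")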

    def _get_domain_for_service(self, service_name: str) -> str:
        """Determines the base domain for a given service."""
@@ -5571,7 +5911,14 @@ class DownloaderApp (QWidget ):
            'target_post_id_from_initial_url':None ,
            'custom_folder_name':None ,
            'num_file_threads':1 ,
+            # --- START: ADDED COOKIE FIX ---
+            'use_cookie': self.use_cookie_checkbox.isChecked(),
+            'cookie_text': self.cookie_text_input.text(),
+            'selected_cookie_file': self.selected_cookie_filepath,
+            'app_base_dir': self.app_base_dir,
+            # --- END: ADDED COOKIE FIX ---
            'manga_date_file_counter_ref':None ,
        }
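The three cookie fields mirror what the UI exposes: an enable checkbox, a raw cookie string, and an optional cookies.txt path; how they are consumed is up to the project's prepare_cookies_for_request. Purely as an illustration, a raw "name=value; name2=value2" string can be turned into a requests-style dict like this (generic sketch, not the project's implementation):

def cookie_header_to_dict(cookie_text):
    """Parse 'name=value; other=value2' into a dict usable as a cookies mapping."""
    cookies = {}
    for chunk in cookie_text.split(';'):
        if '=' in chunk:
            name, _, value = chunk.strip().partition('=')
            cookies[name] = value
    return cookies

print(cookie_header_to_dict("session=abc123; __cf_bm=xyz"))
# -> {'session': 'abc123', '__cf_bm': 'xyz'}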
@@ -5918,6 +6265,8 @@ class DownloaderApp (QWidget ):
        self.is_fetching_only = False
        self.fetched_posts_for_download = []
        self.is_ready_to_download_fetched = False
+        self.fetched_posts_for_batch_update = []
+        self.is_ready_to_download_batch_update = False
        self.allcomic_warning_shown = False
        self.set_ui_enabled(True)
@@ -6270,21 +6619,16 @@ class DownloaderApp (QWidget ):
            return
        dialog = EmptyPopupDialog(self.app_base_dir, self)
        if dialog.exec_() == QDialog.Accepted:
-            if dialog.update_profile_data:
-                self.active_update_profile = dialog.update_profile_data
-                self.link_input.setText(dialog.update_creator_name)
-                self.favorite_download_queue.clear()
-                if 'settings' in self.active_update_profile:
-                    self.log_signal.emit(f" Applying saved settings from '{dialog.update_creator_name}' profile...")
-                    self._load_ui_from_settings_dict(self.active_update_profile['settings'])
-                    self.log_signal.emit(" Settings restored.")
-                self.log_signal.emit(f" Loaded profile for '{dialog.update_creator_name}'. Click 'Check For Updates' to continue.")
-                self._update_button_states_and_connections()
+            # --- NEW BATCH UPDATE LOGIC ---
+            if hasattr(dialog, 'update_profiles_list') and dialog.update_profiles_list:
+                self.active_update_profiles_list = dialog.update_profiles_list
+                self.log_signal.emit(f" Loaded {len(self.active_update_profiles_list)} creator profile(s). Checking for updates...")
+                self.link_input.setText(f"{len(self.active_update_profiles_list)} profiles loaded for update check...")
+                self._start_batch_update_check(self.active_update_profiles_list)
+            # --- Original logic for adding creators to queue ---
            elif hasattr(dialog, 'selected_creators_for_queue') and dialog.selected_creators_for_queue:
-                self.active_update_profile = None
+                self.active_update_profile = None  # Ensure single update mode is off
                self.favorite_download_queue.clear()
                for creator_data in dialog.selected_creators_for_queue:
@@ -6314,9 +6658,7 @@ class DownloaderApp (QWidget ):
        if hasattr(self, 'link_input'):
            self.last_link_input_text_for_queue_sync = self.link_input.text()
-        # Manually trigger the UI update now that the queue is populated and the dialog is closed.
        self.update_ui_for_manga_mode(self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False)

    def _load_saved_cookie_settings(self):
        """Loads and applies saved cookie settings on startup."""
        try: