diff --git a/downloader_utils.py b/downloader_utils.py
index 82a4466..6a00623 100644
--- a/downloader_utils.py
+++ b/downloader_utils.py
@@ -8,6 +8,7 @@ import hashlib
import http .client
import traceback
from concurrent .futures import ThreadPoolExecutor ,Future ,CancelledError ,as_completed
+from collections import deque # Import deque
import html
from PyQt5 .QtCore import QObject ,pyqtSignal ,QThread ,QMutex ,QMutexLocker
from urllib .parse import urlparse
@@ -41,6 +42,7 @@ from io import BytesIO
STYLE_POST_TITLE ="post_title"
STYLE_ORIGINAL_NAME ="original_name"
STYLE_DATE_BASED ="date_based"
+STYLE_DATE_POST_TITLE = "date_post_title" # New style constant
MANGA_DATE_PREFIX_DEFAULT =""
STYLE_POST_TITLE_GLOBAL_NUMBERING ="post_title_global_numbering"
SKIP_SCOPE_FILES ="files"
@@ -509,13 +511,31 @@ def fetch_post_comments (api_domain ,service ,user_id ,post_id ,headers ,logger
raise RuntimeError (f"Error decoding JSON from comments API for post {post_id } ({comments_api_url }): {e }. Response text: {response .text [:200 ]}")
except Exception as e :
raise RuntimeError (f"Unexpected error fetching comments for post {post_id } ({comments_api_url }): {e }")
-def download_from_api (api_url_input ,logger =print ,start_page =None ,end_page =None ,manga_mode =False ,
-cancellation_event =None ,pause_event =None ,use_cookie =False ,cookie_text ="",selected_cookie_file =None ,app_base_dir =None ):
- headers ={'User-Agent':'Mozilla/5.0','Accept':'application/json'}
- service ,user_id ,target_post_id =extract_post_info (api_url_input )
- if cancellation_event and cancellation_event .is_set ():
- logger (" Download_from_api cancelled at start.")
- return
+def download_from_api(
+ api_url_input,
+ logger=print, # type: ignore
+ start_page=None, # type: ignore
+ end_page=None, # type: ignore
+ manga_mode=False, # type: ignore
+ cancellation_event=None, # type: ignore
+ pause_event=None, # type: ignore
+ use_cookie=False, # type: ignore
+ cookie_text="", # type: ignore
+ selected_cookie_file=None, # type: ignore
+ app_base_dir=None, # type: ignore
+    manga_filename_style_for_sort_check=None  # type: ignore  # filename style; selects manga-mode fetch/sort strategy below
+):
+ headers = {
+ 'User-Agent': 'Mozilla/5.0',
+ 'Accept': 'application/json'
+ }
+
+ service, user_id, target_post_id = extract_post_info(api_url_input)
+
+ if cancellation_event and cancellation_event.is_set():
+ logger(" Download_from_api cancelled at start.")
+ return
+
parsed_input_url_for_domain =urlparse (api_url_input )
api_domain =parsed_input_url_for_domain .netloc
if not any (d in api_domain .lower ()for d in ['kemono.su','kemono.party','coomer.su','coomer.party']):
@@ -552,11 +572,14 @@ cancellation_event =None ,pause_event =None ,use_cookie =False ,cookie_text ="",
return
if target_post_id and (start_page or end_page ):
logger ("⚠️ Page range (start/end page) is ignored when a specific post URL is provided (searching all pages for the post).")
- is_creator_feed_for_manga =manga_mode and not target_post_id
+ # determine if we should use the "fetch all then sort oldest first" logic for manga mode
+ is_manga_mode_fetch_all_and_sort_oldest_first = manga_mode and \
+ (manga_filename_style_for_sort_check != STYLE_DATE_POST_TITLE) and \
+ not target_post_id
api_base_url =f"https://{api_domain }/api/v1/{service }/user/{user_id }"
page_size =50
- if is_creator_feed_for_manga :
- logger (" Manga Mode: Fetching posts to sort by date (oldest processed first)...")
+ if is_manga_mode_fetch_all_and_sort_oldest_first :
+ logger(f" Manga Mode (Style: {manga_filename_style_for_sort_check if manga_filename_style_for_sort_check else 'Default'} - Oldest First Sort Active): Fetching all posts to sort by date...")
all_posts_for_manga_mode =[]
current_offset_manga =0
if start_page and start_page >1 :
@@ -635,6 +658,12 @@ cancellation_event =None ,pause_event =None ,use_cookie =False ,cookie_text ="",
break
yield all_posts_for_manga_mode [i :i +page_size ]
return
+
+ # If manga_mode is true but we didn't enter the block above,
+ # it means we want newest first for STYLE_DATE_POST_TITLE (or it's a single post URL)
+ if manga_mode and not target_post_id and (manga_filename_style_for_sort_check == STYLE_DATE_POST_TITLE):
+ logger(f" Manga Mode (Style: {STYLE_DATE_POST_TITLE}): Processing posts in default API order (newest first).")
+
current_page_num =1
current_offset =0
processed_target_post_flag =False
@@ -727,8 +756,10 @@ class PostProcessorSignals (QObject ):
file_download_status_signal =pyqtSignal (bool )
external_link_signal =pyqtSignal (str ,str ,str ,str ,str )
file_progress_signal =pyqtSignal (str ,object )
+ file_successfully_downloaded_signal = pyqtSignal(dict) # New signal for successfully downloaded files
missed_character_post_signal =pyqtSignal (str ,str )
class PostProcessorWorker :
+    # Processes a single post: filtering, folder/filename resolution, and file downloads.
def __init__ (self ,post_data ,download_root ,known_names ,
filter_character_list ,emitter ,
unwanted_keywords ,filter_mode ,skip_zip ,skip_rar ,
@@ -836,7 +867,7 @@ class PostProcessorWorker :
post_title ="",file_index_in_post =0 ,num_files_in_this_post =1 ,
manga_date_file_counter_ref =None ):
was_original_name_kept_flag =False
- manga_global_file_counter_ref =None
+        # NOTE: removed duplicate local manga_global_file_counter_ref=None; the value is now passed in as a parameter.
final_filename_saved_for_return =""
def _get_current_character_filters (self ):
if self .dynamic_filter_holder :
@@ -846,7 +877,7 @@ class PostProcessorWorker :
post_title ="",file_index_in_post =0 ,num_files_in_this_post =1 ,
manga_date_file_counter_ref =None ,
forced_filename_override =None ,
- manga_global_file_counter_ref =None ):
+ manga_global_file_counter_ref =None, folder_context_name_for_history=None ): # Added folder_context_name_for_history
was_original_name_kept_flag =False
final_filename_saved_for_return =""
retry_later_details =None
@@ -948,6 +979,48 @@ class PostProcessorWorker :
self .logger (f"⚠️ Manga Title+GlobalNum Mode: Counter ref not provided or malformed for '{api_original_filename }'. Using original. Ref: {manga_global_file_counter_ref }")
filename_to_save_in_main_path =cleaned_original_api_filename
self .logger (f"⚠️ Manga mode (Title+GlobalNum Style Fallback): Using cleaned original filename '{filename_to_save_in_main_path }' for post {original_post_id_for_log }.")
+ elif self.manga_filename_style == STYLE_DATE_POST_TITLE:
+ published_date_str = self.post.get('published')
+ added_date_str = self.post.get('added')
+ formatted_date_str = "nodate" # Default if no date found
+
+ if published_date_str:
+ try:
+ formatted_date_str = published_date_str.split('T')[0]
+                except Exception:  # broad catch: date parsing must never abort filename generation
+ self.logger(f" ⚠️ Could not parse 'published' date '{published_date_str}' for STYLE_DATE_POST_TITLE. Using 'nodate'.")
+ elif added_date_str:
+ try:
+ formatted_date_str = added_date_str.split('T')[0]
+ self.logger(f" ⚠️ Post ID {original_post_id_for_log} missing 'published' date, using 'added' date '{added_date_str}' for STYLE_DATE_POST_TITLE naming.")
+                except Exception:  # broad catch: date parsing must never abort filename generation
+ self.logger(f" ⚠️ Could not parse 'added' date '{added_date_str}' for STYLE_DATE_POST_TITLE. Using 'nodate'.")
+ else:
+ self.logger(f" ⚠️ Post ID {original_post_id_for_log} missing both 'published' and 'added' dates for STYLE_DATE_POST_TITLE. Using 'nodate'.")
+
+ if post_title and post_title.strip():
+ temp_cleaned_title = clean_filename(post_title.strip())
+ if not temp_cleaned_title or temp_cleaned_title.startswith("untitled_file"):
+ self.logger(f"⚠️ Manga mode (Date+PostTitle Style): Post title for post {original_post_id_for_log} ('{post_title}') was empty or generic after cleaning. Using 'post' as title part.")
+ cleaned_post_title_for_filename = "post"
+ else:
+ cleaned_post_title_for_filename = temp_cleaned_title
+
+ base_name_for_style = f"{formatted_date_str}_{cleaned_post_title_for_filename}"
+
+ if num_files_in_this_post > 1:
+ filename_to_save_in_main_path = f"{base_name_for_style}_{file_index_in_post}{original_ext}" if file_index_in_post > 0 else f"{base_name_for_style}{original_ext}"
+ else: # Single file post
+ filename_to_save_in_main_path = f"{base_name_for_style}{original_ext}"
+ else:
+ self.logger(f"⚠️ Manga mode (Date+PostTitle Style): Post title missing for post {original_post_id_for_log}. Using 'post' as title part with date prefix.")
+ cleaned_post_title_for_filename = "post" # Fallback title part
+ base_name_for_style = f"{formatted_date_str}_{cleaned_post_title_for_filename}"
+ if num_files_in_this_post > 1:
+ filename_to_save_in_main_path = f"{base_name_for_style}_{file_index_in_post}{original_ext}" if file_index_in_post > 0 else f"{base_name_for_style}{original_ext}"
+ else: # Single file post
+ filename_to_save_in_main_path = f"{base_name_for_style}{original_ext}"
+                    self .logger (f"⚠️ Manga mode (Date+PostTitle Style): Post title missing; using fallback filename '{filename_to_save_in_main_path }' for post {original_post_id_for_log }.")
else :
self .logger (f"⚠️ Manga mode: Unknown filename style '{self .manga_filename_style }'. Defaulting to original filename for '{api_original_filename }'.")
filename_to_save_in_main_path =cleaned_original_api_filename
@@ -1320,7 +1393,23 @@ class PostProcessorWorker :
with self .downloaded_files_lock :self .downloaded_files .add (filename_to_save_in_main_path )
final_filename_saved_for_return =final_filename_on_disk
self .logger (f"✅ Saved: '{final_filename_saved_for_return }' (from '{api_original_filename }', {downloaded_size_bytes /(1024 *1024 ):.2f} MB) in '{os .path .basename (effective_save_folder )}'")
+
+ # Emit signal for successfully downloaded file
+ downloaded_file_details = {
+ 'disk_filename': final_filename_saved_for_return,
+ 'post_title': post_title,
+ 'post_id': original_post_id_for_log,
+ 'upload_date_str': self.post.get('published') or self.post.get('added') or "N/A",
+                        'download_timestamp': time.time(), # timestamp recorded here at save time
+ 'download_path': effective_save_folder, # The folder it was saved into
+ 'service': self.service,
+ 'user_id': self.user_id,
+ 'api_original_filename': api_original_filename,
+ 'folder_context_name': folder_context_name_for_history or os.path.basename(effective_save_folder) # Best effort context name
+ }
+ self._emit_signal('file_successfully_downloaded', downloaded_file_details)
time .sleep (0.05 )
+
return 1 ,0 ,final_filename_saved_for_return ,was_original_name_kept_flag ,FILE_DOWNLOAD_STATUS_SUCCESS ,None
except Exception as save_err :
self .logger (f"->>Save Fail for '{final_filename_on_disk }': {save_err }")
@@ -1336,18 +1425,20 @@ class PostProcessorWorker :
def process (self ):
- if self ._check_pause (f"Post processing for ID {self .post .get ('id','N/A')}"):return 0 ,0 ,[],[],[]
- if self .check_cancel ():return 0 ,0 ,[],[],[]
+ if self ._check_pause (f"Post processing for ID {self .post .get ('id','N/A')}"):return 0 ,0 ,[],[],[], None
+ if self .check_cancel ():return 0 ,0 ,[],[],[], None
current_character_filters =self ._get_current_character_filters ()
kept_original_filenames_for_log =[]
retryable_failures_this_post =[]
permanent_failures_this_post =[]
total_downloaded_this_post =0
total_skipped_this_post =0
+ history_data_for_this_post = None
+
parsed_api_url =urlparse (self .api_url_input )
referer_url =f"https://{parsed_api_url .netloc }/"
headers ={'User-Agent':'Mozilla/5.0','Referer':referer_url ,'Accept':'*/*'}
- link_pattern =re .compile (r"""]*>(.*?)""",
+ link_pattern =re .compile (r"""]*>(.*?)""", # type: ignore
re .IGNORECASE |re .DOTALL )
post_data =self .post
post_title =post_data .get ('title','')or 'untitled_post'
@@ -1370,17 +1461,17 @@ class PostProcessorWorker :
post_is_candidate_by_file_char_match_in_comment_scope =False
char_filter_that_matched_file_in_comment_scope =None
char_filter_that_matched_comment =None
- if current_character_filters and (self .char_filter_scope ==CHAR_SCOPE_TITLE or self .char_filter_scope ==CHAR_SCOPE_BOTH ):
- if self ._check_pause (f"Character title filter for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[]
+ if current_character_filters and (self .char_filter_scope ==CHAR_SCOPE_TITLE or self .char_filter_scope ==CHAR_SCOPE_BOTH ): # type: ignore
+ if self ._check_pause (f"Character title filter for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
for idx ,filter_item_obj in enumerate (current_character_filters ):
if self .check_cancel ():break
- terms_to_check_for_title =list (filter_item_obj ["aliases"])
+ terms_to_check_for_title =list (filter_item_obj ["aliases"]) # type: ignore
if filter_item_obj ["is_group"]:
- if filter_item_obj ["name"]not in terms_to_check_for_title :
- terms_to_check_for_title .append (filter_item_obj ["name"])
+ if filter_item_obj ["name"]not in terms_to_check_for_title : # type: ignore
+ terms_to_check_for_title .append (filter_item_obj ["name"]) # type: ignore
unique_terms_for_title_check =list (set (terms_to_check_for_title ))
for term_to_match in unique_terms_for_title_check :
- match_found_for_term =is_title_match_for_character (post_title ,term_to_match )
+ match_found_for_term =is_title_match_for_character (post_title ,term_to_match ) # type: ignore
if match_found_for_term :
post_is_candidate_by_title_char_match =True
char_filter_that_matched_title =filter_item_obj
@@ -1402,18 +1493,18 @@ class PostProcessorWorker :
all_files_from_post_api_for_char_check .append ({'_original_name_for_log':original_api_att_name })
if current_character_filters and self .char_filter_scope ==CHAR_SCOPE_COMMENTS :
self .logger (f" [Char Scope: Comments] Phase 1: Checking post files for matches before comments for post ID '{post_id }'.")
- if self ._check_pause (f"File check (comments scope) for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[]
+ if self ._check_pause (f"File check (comments scope) for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
for file_info_item in all_files_from_post_api_for_char_check :
if self .check_cancel ():break
current_api_original_filename_for_check =file_info_item .get ('_original_name_for_log')
if not current_api_original_filename_for_check :continue
for filter_item_obj in current_character_filters :
- terms_to_check =list (filter_item_obj ["aliases"])
- if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check :
- terms_to_check .append (filter_item_obj ["name"])
+ terms_to_check =list (filter_item_obj ["aliases"]) # type: ignore
+ if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check : # type: ignore
+ terms_to_check .append (filter_item_obj ["name"]) # type: ignore
for term_to_match in terms_to_check :
if is_filename_match_for_character (current_api_original_filename_for_check ,term_to_match ):
- post_is_candidate_by_file_char_match_in_comment_scope =True
+ post_is_candidate_by_file_char_match_in_comment_scope =True # type: ignore
char_filter_that_matched_file_in_comment_scope =filter_item_obj
self .logger (f" Match Found (File in Comments Scope): File '{current_api_original_filename_for_check }' matches char filter term '{term_to_match }' (from group/name '{filter_item_obj ['name']}'). Post is candidate.")
break
@@ -1422,7 +1513,7 @@ class PostProcessorWorker :
self .logger (f" [Char Scope: Comments] Phase 1 Result: post_is_candidate_by_file_char_match_in_comment_scope = {post_is_candidate_by_file_char_match_in_comment_scope }")
if current_character_filters and self .char_filter_scope ==CHAR_SCOPE_COMMENTS :
if not post_is_candidate_by_file_char_match_in_comment_scope :
- if self ._check_pause (f"Comment check for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[]
+ if self ._check_pause (f"Comment check for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
self .logger (f" [Char Scope: Comments] Phase 2: No file match found. Checking post comments for post ID '{post_id }'.")
try :
parsed_input_url_for_comments =urlparse (self .api_url_input )
@@ -1444,11 +1535,11 @@ class PostProcessorWorker :
raw_comment_content =comment_item .get ('content','')
if not raw_comment_content :continue
cleaned_comment_text =strip_html_tags (raw_comment_content )
- if not cleaned_comment_text .strip ():continue
+ if not cleaned_comment_text .strip ():continue # type: ignore
for filter_item_obj in current_character_filters :
- terms_to_check_comment =list (filter_item_obj ["aliases"])
- if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check_comment :
- terms_to_check_comment .append (filter_item_obj ["name"])
+ terms_to_check_comment =list (filter_item_obj ["aliases"]) # type: ignore
+ if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check_comment : # type: ignore
+ terms_to_check_comment .append (filter_item_obj ["name"]) # type: ignore
for term_to_match_comment in terms_to_check_comment :
if is_title_match_for_character (cleaned_comment_text ,term_to_match_comment ):
post_is_candidate_by_comment_char_match =True
@@ -1470,32 +1561,33 @@ class PostProcessorWorker :
if current_character_filters :
if self .char_filter_scope ==CHAR_SCOPE_TITLE and not post_is_candidate_by_title_char_match :
self .logger (f" -> Skip Post (Scope: Title - No Char Match): Title '{post_title [:50 ]}' does not match character filters.")
- self ._emit_signal ('missed_character_post',post_title ,"No title match for character filter")
- return 0 ,num_potential_files_in_post ,[],[],[]
+ self ._emit_signal ('missed_character_post',post_title ,"No title match for character filter") # type: ignore
+ return 0 ,num_potential_files_in_post ,[],[],[], None
if self .char_filter_scope ==CHAR_SCOPE_COMMENTS and not post_is_candidate_by_file_char_match_in_comment_scope and not post_is_candidate_by_comment_char_match :
self .logger (f" -> Skip Post (Scope: Comments - No Char Match in Comments): Post ID '{post_id }', Title '{post_title [:50 ]}...'")
if self .emitter and hasattr (self .emitter ,'missed_character_post_signal'):
- self ._emit_signal ('missed_character_post',post_title ,"No character match in files or comments (Comments scope)")
- return 0 ,num_potential_files_in_post ,[],[],[]
+ self ._emit_signal ('missed_character_post',post_title ,"No character match in files or comments (Comments scope)") # type: ignore
+ return 0 ,num_potential_files_in_post ,[],[],[], None
if self .skip_words_list and (self .skip_words_scope ==SKIP_SCOPE_POSTS or self .skip_words_scope ==SKIP_SCOPE_BOTH ):
- if self ._check_pause (f"Skip words (post title) for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[]
+ if self ._check_pause (f"Skip words (post title) for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
post_title_lower =post_title .lower ()
for skip_word in self .skip_words_list :
if skip_word .lower ()in post_title_lower :
self .logger (f" -> Skip Post (Keyword in Title '{skip_word }'): '{post_title [:50 ]}...'. Scope: {self .skip_words_scope }")
- return 0 ,num_potential_files_in_post ,[],[],[]
+ return 0 ,num_potential_files_in_post ,[],[],[], None
if not self .extract_links_only and self .manga_mode_active and current_character_filters and (self .char_filter_scope ==CHAR_SCOPE_TITLE or self .char_filter_scope ==CHAR_SCOPE_BOTH )and not post_is_candidate_by_title_char_match :
self .logger (f" -> Skip Post (Manga Mode with Title/Both Scope - No Title Char Match): Title '{post_title [:50 ]}' doesn't match filters.")
- self ._emit_signal ('missed_character_post',post_title ,"Manga Mode: No title match for character filter (Title/Both scope)")
- return 0 ,num_potential_files_in_post ,[],[],[]
+ self ._emit_signal ('missed_character_post',post_title ,"Manga Mode: No title match for character filter (Title/Both scope)") # type: ignore
+ return 0 ,num_potential_files_in_post ,[],[],[], None
if not isinstance (post_attachments ,list ):
self .logger (f"⚠️ Corrupt attachment data for post {post_id } (expected list, got {type (post_attachments )}). Skipping attachments.")
post_attachments =[]
base_folder_names_for_post_content =[]
+ determined_post_save_path_for_history = self.override_output_dir if self.override_output_dir else self.download_root
if not self .extract_links_only and self .use_subfolders :
- if self ._check_pause (f"Subfolder determination for post {post_id }"):return 0 ,num_potential_files_in_post ,[]
+ if self ._check_pause (f"Subfolder determination for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
primary_char_filter_for_folder =None
- log_reason_for_folder =""
+ log_reason_for_folder ="" # type: ignore
if self .char_filter_scope ==CHAR_SCOPE_COMMENTS and char_filter_that_matched_comment :
if post_is_candidate_by_file_char_match_in_comment_scope and char_filter_that_matched_file_in_comment_scope :
primary_char_filter_for_folder =char_filter_that_matched_file_in_comment_scope
@@ -1506,10 +1598,10 @@ class PostProcessorWorker :
elif (self .char_filter_scope ==CHAR_SCOPE_TITLE or self .char_filter_scope ==CHAR_SCOPE_BOTH )and char_filter_that_matched_title :
primary_char_filter_for_folder =char_filter_that_matched_title
log_reason_for_folder ="Matched char filter in title"
- if primary_char_filter_for_folder :
- base_folder_names_for_post_content =[clean_folder_name (primary_char_filter_for_folder ["name"])]
- cleaned_primary_folder_name =clean_folder_name (primary_char_filter_for_folder ["name"])
- if cleaned_primary_folder_name .lower ()in effective_unwanted_keywords_for_folder_naming and cleaned_primary_folder_name .lower ()!="untitled_folder":
+ if primary_char_filter_for_folder : # type: ignore
+ base_folder_names_for_post_content =[clean_folder_name (primary_char_filter_for_folder ["name"])] # type: ignore
+ cleaned_primary_folder_name =clean_folder_name (primary_char_filter_for_folder ["name"]) # type: ignore
+ if cleaned_primary_folder_name .lower ()in effective_unwanted_keywords_for_folder_naming and cleaned_primary_folder_name .lower ()!="untitled_folder": # type: ignore
self .logger (f" ⚠️ Primary char filter folder name '{cleaned_primary_folder_name }' is in ignore list. Using generic name.")
base_folder_names_for_post_content =["Generic Post Content"]
else :
@@ -1524,7 +1616,7 @@ class PostProcessorWorker :
)
valid_derived_folders_from_title_known_txt =[
- name for name in derived_folders_from_title_via_known_txt
+ name for name in derived_folders_from_title_via_known_txt # type: ignore
if name and name .strip ()and name .lower ()!="untitled_folder"
]
@@ -1541,7 +1633,7 @@ class PostProcessorWorker :
FOLDER_NAME_STOP_WORDS
)
- title_is_only_creator_ignored_words =False
+ title_is_only_creator_ignored_words =False # type: ignore
if candidate_name_from_title_basic_clean and candidate_name_from_title_basic_clean .lower ()!="untitled_folder"and self .creator_download_folder_ignore_words :
candidate_title_words ={word .lower ()for word in candidate_name_from_title_basic_clean .split ()}
@@ -1592,23 +1684,31 @@ class PostProcessorWorker :
if not base_folder_names_for_post_content :
final_fallback_name =clean_folder_name (post_title if post_title and post_title .strip ()else "Generic Post Content")
base_folder_names_for_post_content =[final_fallback_name ]
- self .logger (f" Ultimate fallback folder name: {final_fallback_name }")
+ self .logger (f" Ultimate fallback folder name: {final_fallback_name }") # type: ignore
+
+ if base_folder_names_for_post_content:
+ determined_post_save_path_for_history = os.path.join(determined_post_save_path_for_history, base_folder_names_for_post_content[0])
+
+ if not self.extract_links_only and self.use_post_subfolders:
+ cleaned_post_title_for_sub = clean_folder_name(post_title)
+ determined_post_save_path_for_history = os.path.join(determined_post_save_path_for_history, cleaned_post_title_for_sub)
+
if not self .extract_links_only and self .use_subfolders and self .skip_words_list :
- if self ._check_pause (f"Folder keyword skip check for post {post_id }"):return 0 ,num_potential_files_in_post ,[]
+ if self ._check_pause (f"Folder keyword skip check for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
for folder_name_to_check in base_folder_names_for_post_content :
if not folder_name_to_check :continue
if any (skip_word .lower ()in folder_name_to_check .lower ()for skip_word in self .skip_words_list ):
- matched_skip =next ((sw for sw in self .skip_words_list if sw .lower ()in folder_name_to_check .lower ()),"unknown_skip_word")
- self .logger (f" -> Skip Post (Folder Keyword): Potential folder '{folder_name_to_check }' contains '{matched_skip }'.")
- return 0 ,num_potential_files_in_post ,[],[],[]
- if (self .show_external_links or self .extract_links_only )and post_content_html :
- if self ._check_pause (f"External link extraction for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[]
+ matched_skip =next ((sw for sw in self .skip_words_list if sw .lower ()in folder_name_to_check .lower ()),"unknown_skip_word") # type: ignore
+ self .logger (f" -> Skip Post (Folder Keyword): Potential folder '{folder_name_to_check }' contains '{matched_skip }'.") # type: ignore
+ return 0 ,num_potential_files_in_post ,[],[],[], None
+ if (self .show_external_links or self .extract_links_only )and post_content_html : # type: ignore
+ if self ._check_pause (f"External link extraction for post {post_id }"):return 0 ,num_potential_files_in_post ,[],[],[], None
try :
mega_key_pattern =re .compile (r'\b([a-zA-Z0-9_-]{43}|[a-zA-Z0-9_-]{22})\b')
unique_links_data ={}
for match in link_pattern .finditer (post_content_html ):
link_url =match .group (1 ).strip ()
- link_url =html .unescape (link_url )
+ link_url =html .unescape (link_url ) # type: ignore
link_inner_text =match .group (2 )
if not any (ext in link_url .lower ()for ext in ['.css','.js','.ico','.xml','.svg'])and not link_url .startswith ('javascript:')and link_url not in unique_links_data :
clean_link_text =re .sub (r'<.*?>','',link_inner_text )
@@ -1637,12 +1737,12 @@ class PostProcessorWorker :
decryption_key_found =key_match_in_content .group (1 )
if platform not in scraped_platforms :
self ._emit_signal ('external_link',post_title ,link_text ,link_url ,platform ,decryption_key_found or "")
- links_emitted_count +=1
+ links_emitted_count +=1 # type: ignore
if links_emitted_count >0 :self .logger (f" 🔗 Found {links_emitted_count } potential external link(s) in post content.")
except Exception as e :self .logger (f"⚠️ Error parsing post content for links: {e }\n{traceback .format_exc (limit =2 )}")
if self .extract_links_only :
self .logger (f" Extract Links Only mode: Finished processing post {post_id } for links.")
- return 0 ,0 ,[],[],[]
+ return 0 ,0 ,[],[],[], None
all_files_from_post_api =[]
api_file_domain =urlparse (self .api_url_input ).netloc
if not api_file_domain or not any (d in api_file_domain .lower ()for d in ['kemono.su','kemono.party','coomer.su','coomer.party']):
@@ -1729,22 +1829,22 @@ class PostProcessorWorker :
all_files_from_post_api =[finfo for finfo in all_files_from_post_api if finfo .get ('_from_content_scan')]
if not all_files_from_post_api :
self .logger (f" -> No images found via content scan for post {post_id } in this combined mode.")
- return 0 ,0 ,[],[],[]
+ return 0 ,0 ,[],[],[], None
else :
self .logger (f" Mode: 'Download Thumbnails Only' active. Filtering for API thumbnails for post {post_id }.")
all_files_from_post_api =[finfo for finfo in all_files_from_post_api if finfo .get ('_is_thumbnail')]
if not all_files_from_post_api :
self .logger (f" -> No API image thumbnails found for post {post_id } in thumbnail-only mode.")
- return 0 ,0 ,[],[],[]
+ return 0 ,0 ,[],[],[], None
if self .manga_mode_active and self .manga_filename_style ==STYLE_DATE_BASED :
def natural_sort_key_for_files (file_api_info ):
name =file_api_info .get ('_original_name_for_log','').lower ()
return [int (text )if text .isdigit ()else text for text in re .split ('([0-9]+)',name )]
all_files_from_post_api .sort (key =natural_sort_key_for_files )
- self .logger (f" Manga Date Mode: Sorted {len (all_files_from_post_api )} files within post {post_id } by original name for sequential numbering.")
+ self .logger (f" Manga Date Mode: Sorted {len (all_files_from_post_api )} files within post {post_id } by original name for sequential numbering.") # type: ignore
if not all_files_from_post_api :
- self .logger (f" No files found to download for post {post_id }.")
- return 0 ,0 ,[],[],[]
+ self .logger (f" No files found to download for post {post_id }.") # type: ignore
+ return 0 ,0 ,[],[],[], None
files_to_download_info_list =[]
processed_original_filenames_in_this_post =set ()
for file_info in all_files_from_post_api :
@@ -1758,7 +1858,7 @@ class PostProcessorWorker :
processed_original_filenames_in_this_post .add (current_api_original_filename )
if not files_to_download_info_list :
self .logger (f" All files for post {post_id } were duplicate original names or skipped earlier.")
- return 0 ,total_skipped_this_post ,[],[],[]
+ return 0 ,total_skipped_this_post ,[],[],[], None
self .logger (f" Identified {len (files_to_download_info_list )} unique original file(s) for potential download from post {post_id }.")
with ThreadPoolExecutor (max_workers =self .num_file_threads ,thread_name_prefix =f'P{post_id }File_')as file_pool :
@@ -1773,10 +1873,10 @@ class PostProcessorWorker :
file_is_candidate_by_char_filter_scope =True
else :
if self .char_filter_scope ==CHAR_SCOPE_FILES :
- for filter_item_obj in current_character_filters :
- terms_to_check_for_file =list (filter_item_obj ["aliases"])
- if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check_for_file :
- terms_to_check_for_file .append (filter_item_obj ["name"])
+ for filter_item_obj in current_character_filters : # type: ignore
+ terms_to_check_for_file =list (filter_item_obj ["aliases"]) # type: ignore
+ if filter_item_obj ["is_group"]and filter_item_obj ["name"]not in terms_to_check_for_file : # type: ignore
+ terms_to_check_for_file .append (filter_item_obj ["name"]) # type: ignore
unique_terms_for_file_check =list (set (terms_to_check_for_file ))
for term_to_match in unique_terms_for_file_check :
if is_filename_match_for_character (current_api_original_filename ,term_to_match ):
@@ -1796,10 +1896,10 @@ class PostProcessorWorker :
char_filter_info_that_matched_file =char_filter_that_matched_title
self .logger (f" File '{current_api_original_filename }' is candidate because post title matched. Scope: Both (Title part).")
else :
- for filter_item_obj_both_file in current_character_filters :
- terms_to_check_for_file_both =list (filter_item_obj_both_file ["aliases"])
- if filter_item_obj_both_file ["is_group"]and filter_item_obj_both_file ["name"]not in terms_to_check_for_file_both :
- terms_to_check_for_file_both .append (filter_item_obj_both_file ["name"])
+ for filter_item_obj_both_file in current_character_filters : # type: ignore
+ terms_to_check_for_file_both =list (filter_item_obj_both_file ["aliases"]) # type: ignore
+ if filter_item_obj_both_file ["is_group"]and filter_item_obj_both_file ["name"]not in terms_to_check_for_file_both : # type: ignore
+ terms_to_check_for_file_both .append (filter_item_obj_both_file ["name"]) # type: ignore
unique_terms_for_file_both_check =list (set (terms_to_check_for_file_both ))
for term_to_match in unique_terms_for_file_both_check :
if is_filename_match_for_character (current_api_original_filename ,term_to_match ):
@@ -1829,13 +1929,13 @@ class PostProcessorWorker :
char_title_subfolder_name =None
if self .target_post_id_from_initial_url and self .custom_folder_name :
char_title_subfolder_name =self .custom_folder_name
- elif char_filter_info_that_matched_file :
- char_title_subfolder_name =clean_folder_name (char_filter_info_that_matched_file ["name"])
- elif char_filter_that_matched_title :
- char_title_subfolder_name =clean_folder_name (char_filter_that_matched_title ["name"])
- elif char_filter_that_matched_comment :
- char_title_subfolder_name =clean_folder_name (char_filter_that_matched_comment ["name"])
- if char_title_subfolder_name :
+ elif char_filter_info_that_matched_file : # type: ignore
+ char_title_subfolder_name =clean_folder_name (char_filter_info_that_matched_file ["name"]) # type: ignore
+ elif char_filter_that_matched_title : # type: ignore
+ char_title_subfolder_name =clean_folder_name (char_filter_that_matched_title ["name"]) # type: ignore
+ elif char_filter_that_matched_comment : # type: ignore
+ char_title_subfolder_name =clean_folder_name (char_filter_that_matched_comment ["name"]) # type: ignore
+ if char_title_subfolder_name : # type: ignore
target_base_folders_for_this_file_iteration .append (char_title_subfolder_name )
else :
self .logger (f"⚠️ File '{current_api_original_filename }' candidate by char filter, but no folder name derived. Using post title.")
@@ -1860,13 +1960,16 @@ class PostProcessorWorker :
manga_date_counter_to_pass =self .manga_date_file_counter_ref if self .manga_mode_active and self .manga_filename_style ==STYLE_DATE_BASED else None
manga_global_counter_to_pass =self .manga_global_file_counter_ref if self .manga_mode_active and self .manga_filename_style ==STYLE_POST_TITLE_GLOBAL_NUMBERING else None
+ # Pass the determined folder name for history context
+ folder_context_for_file = target_base_folder_name_for_instance if self.use_subfolders and target_base_folder_name_for_instance else clean_folder_name(post_title)
+
futures_list .append (file_pool .submit (
self ._download_single_file ,
file_info =file_info_to_dl ,
target_folder_path =current_path_for_file_instance ,
headers =headers ,original_post_id_for_log =post_id ,skip_event =self .skip_current_file_flag ,
post_title =post_title ,manga_date_file_counter_ref =manga_date_counter_to_pass ,
- manga_global_file_counter_ref =manga_global_counter_to_pass ,
+ manga_global_file_counter_ref =manga_global_counter_to_pass, folder_context_name_for_history=folder_context_for_file,
file_index_in_post =file_idx ,num_files_in_this_post =len (files_to_download_info_list )
))
@@ -1893,18 +1996,46 @@ class PostProcessorWorker :
self .logger (f"❌ File download task for post {post_id } resulted in error: {exc_f }")
total_skipped_this_post +=1
self ._emit_signal ('file_progress',"",None )
+
+ # --- History Data Collection ---
+ # This part is added to collect data for the history feature.
+ # It's placed after the file processing loop for the post.
+ history_data_for_this_post = None # Bind the name unconditionally: the return at the end of process() references it, so it must exist even when this post is filtered out below.
+ if not self.extract_links_only and (total_downloaded_this_post > 0 or not ( # Condition: if not extract_links_only AND (files were downloaded OR post wasn't skipped at very start by title/char filter)
+ (current_character_filters and (
+ (self.char_filter_scope == CHAR_SCOPE_TITLE and not post_is_candidate_by_title_char_match) or
+ (self.char_filter_scope == CHAR_SCOPE_COMMENTS and not post_is_candidate_by_file_char_match_in_comment_scope and not post_is_candidate_by_comment_char_match)
+ )) or
+ (self.skip_words_list and (self.skip_words_scope == SKIP_SCOPE_POSTS or self.skip_words_scope == SKIP_SCOPE_BOTH) and any(sw.lower() in post_title.lower() for sw in self.skip_words_list))
+ )):
+ top_file_name_for_history = "N/A"
+ if post_main_file_info and post_main_file_info.get('name'):
+ top_file_name_for_history = post_main_file_info['name']
+ elif post_attachments and post_attachments[0].get('name'):
+ top_file_name_for_history = post_attachments[0]['name']
+
+ history_data_for_this_post = {
+ 'post_title': post_title, 'post_id': post_id,
+ 'top_file_name': top_file_name_for_history,
+ 'num_files': num_potential_files_in_post, # Already calculated
+ 'upload_date_str': post_data.get('published') or post_data.get('added') or "Unknown",
+ 'download_location': determined_post_save_path_for_history, # Calculated earlier
+ 'service': self.service, 'user_id': self.user_id,
+ }
if self .check_cancel ():self .logger (f" Post {post_id } processing interrupted/cancelled.");
else :self .logger (f" Post {post_id } Summary: Downloaded={total_downloaded_this_post }, Skipped Files={total_skipped_this_post }")
- return total_downloaded_this_post ,total_skipped_this_post ,kept_original_filenames_for_log ,retryable_failures_this_post ,permanent_failures_this_post
+ return total_downloaded_this_post ,total_skipped_this_post ,kept_original_filenames_for_log ,retryable_failures_this_post ,permanent_failures_this_post, history_data_for_this_post
class DownloadThread (QThread ):
progress_signal =pyqtSignal (str )
add_character_prompt_signal =pyqtSignal (str )
file_download_status_signal =pyqtSignal (bool )
finished_signal =pyqtSignal (int ,int ,bool ,list )
external_link_signal =pyqtSignal (str ,str ,str ,str ,str )
+ file_successfully_downloaded_signal = pyqtSignal(dict) # Relay from worker
file_progress_signal =pyqtSignal (str ,object )
retryable_file_failed_signal =pyqtSignal (list )
missed_character_post_signal =pyqtSignal (str ,str )
+ post_processed_for_history_signal = pyqtSignal(dict) # New signal for history data
+ final_history_entries_signal = pyqtSignal(list) # New signal for the final 3 history entries
permanent_file_failed_signal =pyqtSignal (list )
def __init__ (self ,api_url_input ,output_dir ,known_names_copy ,
cancellation_event ,
@@ -1987,6 +2118,7 @@ class DownloadThread (QThread ):
self .scan_content_for_images =scan_content_for_images
self .creator_download_folder_ignore_words =creator_download_folder_ignore_words
self .manga_global_file_counter_ref =manga_global_file_counter_ref
+ self.history_candidates_buffer = deque(maxlen=8) # Buffer for the first 8 posts
if self .compress_images and Image is None :
self .logger ("⚠️ Image compression disabled: Pillow library not found (DownloadThread).")
self .compress_images =False
@@ -2052,9 +2184,10 @@ class DownloadThread (QThread ):
worker_signals_obj .file_progress_signal .connect (self .file_progress_signal )
worker_signals_obj .external_link_signal .connect (self .external_link_signal )
worker_signals_obj .missed_character_post_signal .connect (self .missed_character_post_signal )
+ worker_signals_obj.file_successfully_downloaded_signal.connect(self.file_successfully_downloaded_signal) # Connect new signal
self .logger (" Starting post fetch (single-threaded download process)...")
post_generator =download_from_api (
- self .api_url_input ,
+ self .api_url_input , # type: ignore
logger =self .logger ,
start_page =self .start_page ,
end_page =self .end_page ,
@@ -2064,7 +2197,8 @@ class DownloadThread (QThread ):
use_cookie =self .use_cookie ,
cookie_text =self .cookie_text ,
selected_cookie_file =self .selected_cookie_file ,
- app_base_dir =self .app_base_dir
+ app_base_dir =self .app_base_dir ,
+ manga_filename_style_for_sort_check =self .manga_filename_style if self .manga_mode_active else None
)
for posts_batch_data in post_generator :
if self ._check_pause_self ("Post batch processing"):was_process_cancelled =True ;break
@@ -2116,13 +2250,16 @@ class DownloadThread (QThread ):
creator_download_folder_ignore_words =self .creator_download_folder_ignore_words ,
)
try :
- dl_count ,skip_count ,kept_originals_this_post ,retryable_failures ,permanent_failures =post_processing_worker .process ()
+ dl_count ,skip_count ,kept_originals_this_post ,retryable_failures ,permanent_failures, history_data =post_processing_worker .process ()
grand_total_downloaded_files +=dl_count
grand_total_skipped_files +=skip_count
if kept_originals_this_post :
grand_list_of_kept_original_filenames .extend (kept_originals_this_post )
if retryable_failures :
self .retryable_file_failed_signal .emit (retryable_failures )
+ if history_data: # New: Handle history data from worker
+ if len(self.history_candidates_buffer) < 8:
+ self.history_candidates_buffer.append(history_data) # Record the candidate; without this append the deque stays empty and the < 8 cap never takes effect
+ self.post_processed_for_history_signal.emit(history_data) # Emit for App to handle
if permanent_failures :
self .permanent_file_failed_signal .emit (permanent_failures )
except Exception as proc_err :
@@ -2138,6 +2275,10 @@ class DownloadThread (QThread ):
if was_process_cancelled :break
if not was_process_cancelled and not self .isInterruptionRequested ():
self .logger ("✅ All posts processed or end of content reached by DownloadThread.")
+
+ # Process history candidates at the end of the thread's run
+ # This part is now handled by DownloaderApp for both single and multi-thread
+
except Exception as main_thread_err :
self .logger (f"\n❌ Critical error within DownloadThread run loop: {main_thread_err }")
traceback .print_exc ()
@@ -2150,6 +2291,7 @@ class DownloadThread (QThread ):
worker_signals_obj .external_link_signal .disconnect (self .external_link_signal )
worker_signals_obj .file_progress_signal .disconnect (self .file_progress_signal )
worker_signals_obj .missed_character_post_signal .disconnect (self .missed_character_post_signal )
+ worker_signals_obj.file_successfully_downloaded_signal.disconnect(self.file_successfully_downloaded_signal) # Disconnect new signal
except (TypeError ,RuntimeError )as e :
self .logger (f"ℹ️ Note during DownloadThread signal disconnection: {e }")
diff --git a/languages.py b/languages.py
index db22f8b..e35d9e6 100644
--- a/languages.py
+++ b/languages.py
@@ -1,4 +1,3 @@
-
translations ={
"en":{
"settings_dialog_title":"Settings",
@@ -155,6 +154,7 @@ translations ={
"manga_style_original_file_text":"Name: Original File",
"manga_style_date_based_text":"Name: Date Based",
"manga_style_title_global_num_text":"Name: Title+G.Num",
+"manga_style_date_post_title_text":"Name: Date + Title",
"manga_style_unknown_text":"Name: Unknown Style",
"manga_style_post_title_tooltip":"""Files are named based on the post's title.
- The first file in a post is named using the cleaned post title (e.g., 'My Chapter 1.jpg').
@@ -166,6 +166,13 @@ Output: 'Chapter One.jpg', 'Chapter One_1.png', 'Chapter One_2.gif'.""",
"manga_style_original_file_tooltip":"Files attempt to keep their original filenames.\n\n- An optional prefix can be entered in the input field that appears next to this button.\n\nExample (with prefix 'MySeries'): 'MySeries_OriginalFile.jpg'.\nExample (no prefix): 'OriginalFile.jpg'.",
"manga_style_date_based_tooltip":"Files are named sequentially (e.g., 001.ext, 002.ext) based on post publication order.\n\n- An optional prefix can be entered in the input field that appears next to this button.\n- Multithreading for post processing is disabled for this style to ensure correct numbering.\n\nExample (with prefix 'MyComic'): 'MyComic_001.jpg', 'MyComic_002.png'.\nExample (no prefix): '001.jpg', '002.png'.",
"manga_style_title_global_num_tooltip":"Files are named with the post's title and a global sequential number across all posts.\n\n- Format: '[Cleaned Post Title]_[Global Counter].[ext]'\n- The counter (e.g., _001, _002) increments for every file downloaded in the current session.\n- Multithreading for post processing is disabled for this style to ensure correct numbering.\n\nExample: Post 'Chapter 1' (2 files) -> 'Chapter 1_001.jpg', 'Chapter 1_002.png'.\nNext post 'Chapter 2' (1 file) -> 'Chapter 2_003.jpg'.",
+"manga_style_date_post_title_tooltip":"""Files are named using the post's publication date and its title.
+- Format: '[YYYY-MM-DD]_[Cleaned Post Title].[ext]'
+- The date is taken from the post's 'published' or 'added' field.
+- If a post has multiple files, subsequent files (after the first) get a numeric suffix like '_1', '_2'.
+
+Example: Post 'Chapter One' (published 2023-01-15, 2 files: a.jpg, b.png)
+Output: '2023-01-15_ChapterOne.jpg', '2023-01-15_ChapterOne_1.png'""",
"manga_style_unknown_tooltip":"The manga filename style is currently unknown. This is unexpected. Please cycle to a valid style.",
"manga_style_cycle_tooltip_suffix":"Click to cycle to the next style.",
"fav_artists_dialog_title":"Favorite Artists",
@@ -279,6 +286,9 @@ Output: 'Chapter One.jpg', 'Chapter One_1.png', 'Chapter One_2.gif'.""",
"new_char_input_tooltip_text":"Enter a new show, game, or character name to add to the list above.",
"link_search_input_placeholder_text":"Search Links...",
"link_search_input_tooltip_text":"When in 'Only Links' mode, type here to filter the displayed links by text, URL, or platform.",
+"manga_date_title_suffix_input_placeholder_text":"Suffix (replaces title)",
+"manga_date_title_suffix_input_tooltip_text":"Optional suffix for 'Date + Title' style.\nIf provided, this text will be used instead of the post title.\nExample: 'My Series Vol 1'",
+"history_button_tooltip_text":"View download history (Not Implemented Yet)",
"manga_date_prefix_input_placeholder_text":"Prefix for Manga Filenames",
"manga_date_prefix_input_tooltip_text":"Optional prefix for 'Date Based' or 'Original File' manga filenames (e.g., 'Series Name').\nIf empty, files will be named based on the style without a prefix.",
"log_display_mode_links_view_text":"🔗 Links View",
@@ -307,6 +317,14 @@ Output: 'Chapter One.jpg', 'Chapter One_1.png', 'Chapter One_2.gif'.""",
"cookie_help_dialog_title":"Cookie File Instructions",
"cookie_help_instruction_intro":"
To use cookies, you typically need a cookies.txt file from your browser.
",
"cookie_help_how_to_get_title":"How to get cookies.txt:
",
+"download_history_dialog_title_first_processed": "First Processed Files History",
+"first_files_processed_header": "First {count} Files Processed in this Session:",
+"history_file_label": "File:",
+"history_from_post_label": "From Post:",
+"history_post_uploaded_label": "Post Uploaded:",
+"history_file_downloaded_label": "File Downloaded:",
+"download_history_dialog_title_empty": "Download History (Empty)",
+"no_download_history_header": "No Downloads Yet",
"cookie_help_step1_extension_intro":"Install the 'Get cookies.txt LOCALLY' extension for your Chrome-based browser:
Get cookies.txt LOCALLY on Chrome Web Store",
"cookie_help_step2_login":"Go to the website (e.g., kemono.su or coomer.su) and log in if necessary.",
"cookie_help_step3_click_icon":"Click the extension's icon in your browser toolbar.",
@@ -827,6 +845,14 @@ Output: 'Chapter One.jpg', 'Chapter One_1.png', 'Chapter One_2.gif'.""",
"cookie_help_dialog_title":"Cookieファイルの説明",
"cookie_help_instruction_intro":"Cookieを使用するには、通常ブラウザからcookies.txtファイルが必要です。
",
"cookie_help_how_to_get_title":"cookies.txtの入手方法:
",
+"download_history_dialog_title_first_processed": "最初に処理されたファイルの履歴",
+"first_files_processed_header": "このセッションで最初に処理された {count} 個のファイル:",
+"history_file_label": "ファイル:",
+"history_from_post_label": "投稿元:",
+"history_post_uploaded_label": "投稿アップロード日時:",
+"history_file_downloaded_label": "ファイルダウンロード日時:",
+"download_history_dialog_title_empty": "ダウンロード履歴 (空)",
+"no_download_history_header": "まだダウンロードがありません",
"cookie_help_step1_extension_intro":"Chromeベースのブラウザに「Get cookies.txt LOCALLY」拡張機能をインストールします:
ChromeウェブストアでGet cookies.txt LOCALLYを入手",
"cookie_help_step2_login":"ウェブサイト(例: kemono.suまたはcoomer.su)にアクセスし、必要に応じてログインします。",
"cookie_help_step3_click_icon":"ブラウザのツールバーにある拡張機能のアイコンをクリックします。",
@@ -1326,6 +1352,14 @@ Sortie : 'Chapitre Un.jpg', 'Chapitre Un_1.png', 'Chapitre Un_2.gif'.""",
"cookie_help_dialog_title":"Instructions pour le fichier de cookies",
"cookie_help_instruction_intro":"Pour utiliser les cookies, vous avez généralement besoin d'un fichier cookies.txt de votre navigateur.
",
"cookie_help_how_to_get_title":"Comment obtenir cookies.txt :
",
+"download_history_dialog_title_first_processed": "Historique des premiers fichiers traités",
+"first_files_processed_header": "{count} premiers fichiers traités dans cette session :",
+"history_file_label": "Fichier :",
+"history_from_post_label": "De la publication :",
+"history_post_uploaded_label": "Publication téléversée le :",
+"history_file_downloaded_label": "Fichier téléchargé le :",
+"download_history_dialog_title_empty": "Historique des téléchargements (Vide)",
+"no_download_history_header": "Aucun téléchargement pour le moment",
"cookie_help_step1_extension_intro":"Installez l'extension 'Get cookies.txt LOCALLY' pour votre navigateur basé sur Chrome :
Get cookies.txt LOCALLY sur le Chrome Web Store",
"cookie_help_step2_login":"Allez sur le site web (ex., kemono.su ou coomer.su) et connectez-vous si nécessaire.",
"cookie_help_step3_click_icon":"Cliquez sur l'icône de l'extension dans la barre d'outils de votre navigateur.",
@@ -2365,6 +2399,14 @@ translations ["zh_CN"].update ({
"cookie_help_dialog_title":"Cookie 文件说明",
"cookie_help_instruction_intro":"要使用 cookie,您通常需要浏览器中的 cookies.txt 文件。
",
"cookie_help_how_to_get_title":"如何获取 cookies.txt:
",
+"download_history_dialog_title_first_processed": "首次处理文件历史记录",
+"first_files_processed_header": "此会话中首次处理的 {count} 个文件:",
+"history_file_label": "文件:",
+"history_from_post_label": "来自帖子:",
+"history_post_uploaded_label": "帖子上传于:",
+"history_file_downloaded_label": "文件下载于:",
+"download_history_dialog_title_empty": "下载历史记录(空)",
+"no_download_history_header": "尚无下载",
"cookie_help_step1_extension_intro":"为您的基于 Chrome 的浏览器安装“Get cookies.txt LOCALLY”扩展程序:
在 Chrome 网上应用店获取 Get cookies.txt LOCALLY",
"cookie_help_step2_login":"转到网站(例如,kemono.su 或 coomer.su)并根据需要登录。",
"cookie_help_step3_click_icon":"单击浏览器工具栏中的扩展程序图标。",
@@ -2716,6 +2758,14 @@ translations ["ru"].update ({
"cookie_help_dialog_title":"Инструкции по файлу cookie",
"cookie_help_instruction_intro":"Для использования файлов cookie обычно требуется файл cookies.txt из вашего браузера.
",
"cookie_help_how_to_get_title":"Как получить cookies.txt:
",
+"download_history_dialog_title_first_processed": "История первых обработанных файлов",
+"first_files_processed_header": "Первые {count} файлов, обработанных в этой сессии:",
+"history_file_label": "Файл:",
+"history_from_post_label": "Из поста:",
+"history_post_uploaded_label": "Пост загружен:",
+"history_file_downloaded_label": "Файл скачан:",
+"download_history_dialog_title_empty": "История загрузок (пусто)",
+"no_download_history_header": "Загрузок пока нет",
"cookie_help_step1_extension_intro":"Установите расширение 'Get cookies.txt LOCALLY' для вашего браузера на основе Chrome:
Получить cookies.txt LOCALLY в Chrome Web Store",
"cookie_help_step2_login":"Перейдите на веб-сайт (например, kemono.su или coomer.su) и при необходимости войдите в систему.",
"cookie_help_step3_click_icon":"Нажмите на значок расширения на панели инструментов вашего браузера.",
@@ -3066,6 +3116,14 @@ translations ["ko"].update ({
"cookie_help_dialog_title":"쿠키 파일 지침",
"cookie_help_instruction_intro":"쿠키를 사용하려면 일반적으로 브라우저에서 cookies.txt 파일이 필요합니다.
",
"cookie_help_how_to_get_title":"cookies.txt를 얻는 방법:
",
+"download_history_dialog_title_first_processed": "처음 처리된 파일 기록",
+"first_files_processed_header": "이 세션에서 처음 처리된 {count}개 파일:",
+"history_file_label": "파일:",
+"history_from_post_label": "게시물 출처:",
+"history_post_uploaded_label": "게시물 업로드 날짜:",
+"history_file_downloaded_label": "파일 다운로드 날짜:",
+"download_history_dialog_title_empty": "다운로드 기록 (비어 있음)",
+"no_download_history_header": "아직 다운로드 없음",
"cookie_help_step1_extension_intro":"Chrome 기반 브라우저용 'Get cookies.txt LOCALLY' 확장 프로그램을 설치하십시오:
Chrome 웹 스토어에서 Get cookies.txt LOCALLY 받기",
"cookie_help_step2_login":"웹사이트(예: kemono.su 또는 coomer.su)로 이동하여 필요한 경우 로그인하십시오.",
"cookie_help_step3_click_icon":"브라우저 도구 모음에서 확장 프로그램 아이콘을 클릭하십시오.",
@@ -3416,6 +3474,14 @@ Salida: 'Capítulo Uno.jpg', 'Capítulo Uno_1.png', 'Capítulo Uno_2.gif'.""",
"cookie_help_dialog_title":"Instrucciones del archivo de cookies",
"cookie_help_instruction_intro":"Para usar cookies, normalmente necesita un archivo cookies.txt de su navegador.
",
"cookie_help_how_to_get_title":"Cómo obtener cookies.txt:
",
+"download_history_dialog_title_first_processed": "Historial de los primeros archivos procesados",
+"first_files_processed_header": "Primeros {count} archivos procesados en esta sesión:",
+"history_file_label": "Archivo:",
+"history_from_post_label": "De la publicación:",
+"history_post_uploaded_label": "Publicación subida el:",
+"history_file_downloaded_label": "Archivo descargado el:",
+"download_history_dialog_title_empty": "Historial de descargas (Vacío)",
+"no_download_history_header": "Aún no hay descargas",
"cookie_help_step1_extension_intro":"Instale la extensión 'Get cookies.txt LOCALLY' para su navegador basado en Chrome:
Obtener cookies.txt LOCALLY en Chrome Web Store",
"cookie_help_step2_login":"Vaya al sitio web (p. ej., kemono.su o coomer.su) e inicie sesión si es necesario.",
"cookie_help_step3_click_icon":"Haga clic en el icono de la extensión en la barra de herramientas de su navegador.",
@@ -3778,6 +3844,14 @@ Ausgabe: 'Kapitel Eins.jpg', 'Kapitel Eins_1.png', 'Kapitel Eins_2.gif'.""",
"cookie_help_dialog_title":"Anweisungen zur Cookie-Datei",
"cookie_help_instruction_intro":"Um Cookies zu verwenden, benötigen Sie normalerweise eine cookies.txt-Datei aus Ihrem Browser.
",
"cookie_help_how_to_get_title":"So erhalten Sie cookies.txt:
",
+"download_history_dialog_title_first_processed": "Verlauf der zuerst verarbeiteten Dateien",
+"first_files_processed_header": "Erste {count} in dieser Sitzung verarbeitete Dateien:",
+"history_file_label": "Datei:",
+"history_from_post_label": "Aus Beitrag:",
+"history_post_uploaded_label": "Beitrag hochgeladen am:",
+"history_file_downloaded_label": "Datei heruntergeladen am:",
+"download_history_dialog_title_empty": "Download-Verlauf (Leer)",
+"no_download_history_header": "Noch keine Downloads",
"cookie_help_step1_extension_intro":"Installieren Sie die Erweiterung 'Get cookies.txt LOCALLY' für Ihren Chrome-basierten Browser:
Get cookies.txt LOCALLY im Chrome Web Store",
"cookie_help_step2_login":"Gehen Sie zur Website (z. B. kemono.su oder coomer.su) und melden Sie sich bei Bedarf an.",
"cookie_help_step3_click_icon":"Klicken Sie auf das Erweiterungssymbol in Ihrer Browser-Symbolleiste.",
@@ -4140,6 +4214,14 @@ Saída: 'Capítulo Um.jpg', 'Capítulo Um_1.png', 'Capítulo Um_2.gif'.""",
"cookie_help_dialog_title":"Instruções do Arquivo de Cookie",
"cookie_help_instruction_intro":"Para usar cookies, você normalmente precisa de um arquivo cookies.txt do seu navegador.
",
"cookie_help_how_to_get_title":"Como obter o cookies.txt:
",
+"download_history_dialog_title_first_processed": "Histórico dos Primeiros Arquivos Processados",
+"first_files_processed_header": "Primeiros {count} Arquivos Processados nesta Sessão:",
+"history_file_label": "Arquivo:",
+"history_from_post_label": "Da Publicação:",
+"history_post_uploaded_label": "Publicação Enviada em:",
+"history_file_downloaded_label": "Arquivo Baixado em:",
+"download_history_dialog_title_empty": "Histórico de Downloads (Vazio)",
+"no_download_history_header": "Nenhum Download Ainda",
"cookie_help_step1_extension_intro":"Instale a extensão 'Get cookies.txt LOCALLY' para seu navegador baseado em Chrome:
Obter Get cookies.txt LOCALLY na Chrome Web Store",
"cookie_help_step2_login":"Vá para o site (ex: kemono.su ou coomer.su) e faça login, se necessário.",
"cookie_help_step3_click_icon":"Clique no ícone da extensão na barra de ferramentas do seu navegador.",
@@ -4492,6 +4574,14 @@ translations ["zh_TW"].update ({
"cookie_help_dialog_title":"Cookie 檔案說明",
"cookie_help_instruction_intro":"要使用 cookie,您通常需要瀏覽器中的 cookies.txt 檔案。
",
"cookie_help_how_to_get_title":"如何取得 cookies.txt:
",
+"download_history_dialog_title_first_processed": "最初處理的檔案歷史記錄",
+"first_files_processed_header": "此工作階段中最初處理的 {count} 個檔案:",
+"history_file_label": "檔案:",
+"history_from_post_label": "來自貼文:",
+"history_post_uploaded_label": "貼文上傳於:",
+"history_file_downloaded_label": "檔案下載於:",
+"download_history_dialog_title_empty": "下載歷史記錄(空)",
+"no_download_history_header": "尚無下載",
"cookie_help_step1_extension_intro":"為您的 Chrome 瀏覽器安裝「Get cookies.txt LOCALLY」擴充功能:
在 Chrome 線上應用程式商店取得 Get cookies.txt LOCALLY",
"cookie_help_step2_login":"前往網站(例如 kemono.su 或 coomer.su)並在需要時登入。",
"cookie_help_step3_click_icon":"點擊瀏覽器工具列中的擴充功能圖示。",
diff --git a/main.py b/main.py
index 7512acd..7073008 100644
--- a/main.py
+++ b/main.py
@@ -11,6 +11,7 @@ import http .client
import traceback
import html
import subprocess
+import datetime # Import the datetime module
import random
from collections import deque
import unicodedata
@@ -61,6 +62,7 @@ try :
FILE_DOWNLOAD_STATUS_SKIPPED ,
FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER ,
STYLE_DATE_BASED ,
+ STYLE_DATE_POST_TITLE, # Import new style
STYLE_POST_TITLE_GLOBAL_NUMBERING ,
CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS ,
download_mega_file as drive_download_mega_file ,
@@ -99,6 +101,7 @@ except ImportError as e :
FILE_DOWNLOAD_STATUS_SKIPPED ="skipped"
FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER ="failed_retry_later"
STYLE_DATE_BASED ="date_based"
+ STYLE_DATE_POST_TITLE = "date_post_title"
STYLE_POST_TITLE_GLOBAL_NUMBERING ="post_title_global_numbering"
CREATOR_DOWNLOAD_DEFAULT_FOLDER_IGNORE_WORDS =set ()
def drive_download_mega_file (*args ,**kwargs ):print ("drive_download_mega_file (stub)");pass
@@ -162,7 +165,7 @@ HTML_PREFIX =""
CONFIG_ORGANIZATION_NAME ="KemonoDownloader"
CONFIG_APP_NAME_MAIN ="ApplicationSettings"
MANGA_FILENAME_STYLE_KEY ="mangaFilenameStyleV1"
-STYLE_POST_TITLE ="post_title"
+STYLE_POST_TITLE ="post_title" # Already defined in downloader_utils, but kept for clarity if used locally
STYLE_ORIGINAL_NAME ="original_name"
STYLE_DATE_BASED ="date_based"
STYLE_POST_TITLE_GLOBAL_NUMBERING =STYLE_POST_TITLE_GLOBAL_NUMBERING
@@ -332,6 +335,7 @@ class ConfirmAddAllDialog (QDialog ):
self .parent_app =parent_app
self .setModal (True )
self .new_filter_objects_list =new_filter_objects_list
 self .user_choice =CONFIRM_ADD_ALL_CANCEL_DOWNLOAD
 self .setWindowTitle (self ._tr ("confirm_add_all_dialog_title","Confirm Adding New Names"))
@@ -400,6 +404,7 @@ class ConfirmAddAllDialog (QDialog ):
self .add_selected_button .setDefault (True )
def _tr (self ,key ,default_text =""):
+
if callable (get_translation )and self .parent_app :
return get_translation (self .parent_app .current_selected_language ,key ,default_text )
return default_text
@@ -832,7 +837,10 @@ class EmptyPopupDialog (QDialog ):
self .globally_selected_creators ={}
self.fetched_posts_data = {} # Stores posts by (service, user_id)
self.post_fetch_thread = None
+ self.TITLE_COLUMN_WIDTH_FOR_POSTS = 70 # Define column width
self.globally_selected_post_ids = set() # To store (service, user_id, post_id) tuples
+ self._is_scrolling_titles = False # For scroll synchronization
+ self._is_scrolling_dates = False # For scroll synchronization
# Main layout for the dialog will be a QHBoxLayout holding the splitter
dialog_layout = QHBoxLayout(self)
@@ -889,13 +897,38 @@ class EmptyPopupDialog (QDialog ):
right_pane_layout.addWidget(self.posts_area_title_label)
self.posts_search_input = QLineEdit()
- self.posts_search_input.setVisible(False) # Initially hidden until posts are fetched
+ self.posts_search_input.setVisible(False) # Initially hidden until posts are fetched
# Placeholder text will be set in _retranslate_ui
self.posts_search_input.textChanged.connect(self._filter_fetched_posts_list)
- right_pane_layout.addWidget(self.posts_search_input)
+ right_pane_layout.addWidget(self.posts_search_input) # Moved search input up
- self.posts_list_widget = QListWidget()
- right_pane_layout.addWidget(self.posts_list_widget)
+ # Headers for the new two-column layout (Title and Date)
+ posts_headers_layout = QHBoxLayout()
+ self.posts_title_header_label = QLabel() # Text set in _retranslate_ui
+ self.posts_title_header_label.setStyleSheet("font-weight: bold; padding-left: 20px;") # Padding for checkbox alignment
+ posts_headers_layout.addWidget(self.posts_title_header_label, 7) # 70% stretch factor
+
+ self.posts_date_header_label = QLabel() # Text set in _retranslate_ui
+ self.posts_date_header_label.setStyleSheet("font-weight: bold;")
+ posts_headers_layout.addWidget(self.posts_date_header_label, 3) # 30% stretch factor
+ right_pane_layout.addLayout(posts_headers_layout)
+
+
+ # Splitter for Title and Date lists
+ self.posts_content_splitter = QSplitter(Qt.Horizontal)
+
+ self.posts_title_list_widget = QListWidget() # Renamed from self.posts_list_widget
+ self.posts_title_list_widget.itemChanged.connect(self._handle_post_item_check_changed)
+ self.posts_title_list_widget.setAlternatingRowColors(True) # Enable alternating row colors
+ self.posts_content_splitter.addWidget(self.posts_title_list_widget)
+
+ self.posts_date_list_widget = QListWidget() # New list for dates
+ self.posts_date_list_widget.setSelectionMode(QAbstractItemView.NoSelection) # Dates are not selectable/interactive
+ self.posts_date_list_widget.setAlternatingRowColors(True) # Enable alternating row colors
+ self.posts_date_list_widget.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff) # No horizontal scroll for dates
+ self.posts_content_splitter.addWidget(self.posts_date_list_widget)
+
+ right_pane_layout.addWidget(self.posts_content_splitter, 1) # Add stretch factor of 1
posts_buttons_top_layout = QHBoxLayout()
self.posts_select_all_button = QPushButton() # Text set in _retranslate_ui
@@ -927,7 +960,11 @@ class EmptyPopupDialog (QDialog ):
self.main_splitter.addWidget(self.left_pane_widget)
self.main_splitter.addWidget(self.right_pane_widget)
self.main_splitter.setCollapsible(0, False) # Prevent left pane from collapsing
- self.main_splitter.setCollapsible(1, True)
+ self.main_splitter.setCollapsible(1, True) # Allow right pane to be hidden
+
+ # Connect scrollbars for synchronized scrolling (AFTER both widgets are created)
+ self.posts_title_list_widget.verticalScrollBar().valueChanged.connect(self._sync_scroll_dates)
+ self.posts_date_list_widget.verticalScrollBar().valueChanged.connect(self._sync_scroll_titles)
dialog_layout.addWidget(self.main_splitter)
self.original_size = self.sizeHint() # Store initial size hint
@@ -971,16 +1008,19 @@ class EmptyPopupDialog (QDialog ):
self.right_pane_widget.show()
QTimer.singleShot(10, lambda: self.main_splitter.setSizes([int(self.width() * 0.3), int(self.width() * 0.7)]))
+ # Set initial sizes for the new posts_content_splitter (70/30 for title/date)
+ QTimer.singleShot(20, lambda: self.posts_content_splitter.setSizes([int(self.posts_content_splitter.width() * 0.7), int(self.posts_content_splitter.width() * 0.3)]))
self.add_selected_button.setEnabled(False)
self.globally_selected_post_ids.clear() # Clear previous post selections
self.posts_search_input.setVisible(True)
self.setWindowTitle(self._tr("creator_popup_title_fetching", "Creator Posts"))
self.fetch_posts_button.setEnabled(False)
- self.posts_list_widget.clear()
+ self.posts_title_list_widget.clear()
+ self.posts_date_list_widget.clear() # Clear date list as well
self.fetched_posts_data.clear()
self.posts_area_title_label.setText(self._tr("fav_posts_loading_status", "Loading favorite posts...")) # Generic loading
- self.posts_list_widget.itemChanged.connect(self._handle_post_item_check_changed) # Connect here
+ self.posts_title_list_widget.itemChanged.connect(self._handle_post_item_check_changed) # Connect here
self.progress_bar.setVisible(True)
if self.post_fetch_thread and self.post_fetch_thread.isRunning():
@@ -1007,6 +1047,9 @@ class EmptyPopupDialog (QDialog ):
self ._update_scope_button_text_and_tooltip ()
self.posts_search_input.setPlaceholderText(self._tr("creator_popup_posts_search_placeholder", "Search fetched posts by title..."))
+ # Set header texts for the new two-column layout
+ self.posts_title_header_label.setText(self._tr("column_header_post_title", "Post Title"))
+ self.posts_date_header_label.setText(self._tr("column_header_date_uploaded", "Date Uploaded"))
# Retranslate right pane elements
self.posts_area_title_label.setText(self._tr("creator_popup_posts_area_title", "Fetched Posts")) # Placeholder key
self.posts_select_all_button.setText(self._tr("select_all_button_text", "Select All"))
@@ -1014,6 +1057,18 @@ class EmptyPopupDialog (QDialog ):
self.posts_add_selected_button.setText(self._tr("creator_popup_add_posts_to_queue_button", "Add Selected Posts to Queue")) # Placeholder key
self.posts_close_button.setText(self._tr("fav_posts_cancel_button", "Cancel")) # Re-use cancel
+ def _sync_scroll_dates(self, value):
+ if not self._is_scrolling_titles: # Check flag
+ self._is_scrolling_dates = True # Set own flag
+ self.posts_date_list_widget.verticalScrollBar().setValue(value)
+ self._is_scrolling_dates = False # Clear own flag
+
+ def _sync_scroll_titles(self, value):
+ if not self._is_scrolling_dates: # Check flag
+ self._is_scrolling_titles = True # Set own flag
+ self.posts_title_list_widget.verticalScrollBar().setValue(value)
+ self._is_scrolling_titles = False # Clear own flag
+
def _perform_initial_load (self ):
"""Called by QTimer to load data after dialog is shown."""
self ._load_creators_from_json ()
@@ -1275,7 +1330,8 @@ class EmptyPopupDialog (QDialog ):
def _handle_posts_fetched(self, creator_info, posts_list):
creator_key = (creator_info.get('service'), str(creator_info.get('id')))
- self.fetched_posts_data[creator_key] = posts_list
+ # Store both creator_info and the posts_list
+ self.fetched_posts_data[creator_key] = (creator_info, posts_list)
self._filter_fetched_posts_list() # Refresh list with current filter
def _filter_fetched_posts_list(self):
@@ -1286,22 +1342,25 @@ class EmptyPopupDialog (QDialog ):
if not self.fetched_posts_data:
self.posts_area_title_label.setText(self._tr("no_posts_fetched_yet_status", "No posts fetched yet."))
elif not search_text:
- data_for_rebuild = self.fetched_posts_data
- total_posts_in_view = sum(len(posts) for posts in data_for_rebuild.values())
+ data_for_rebuild = self.fetched_posts_data
+ # Adjust for tuple structure: (creator_info, posts_list)
+ total_posts_in_view = sum(len(posts_tuple[1]) for posts_tuple in data_for_rebuild.values())
if total_posts_in_view > 0:
self.posts_area_title_label.setText(self._tr("fetched_posts_count_label", "Fetched {count} post(s). Select to add to queue.").format(count=total_posts_in_view))
else:
self.posts_area_title_label.setText(self._tr("no_posts_found_for_selection", "No posts found for selected creator(s)."))
else:
- for creator_key, posts_list in self.fetched_posts_data.items():
+ for creator_key, (creator_data_tuple_part, posts_list_tuple_part) in self.fetched_posts_data.items(): # Unpack tuple
matching_posts_for_creator = [
- post for post in posts_list
+ post for post in posts_list_tuple_part # Use posts_list_tuple_part
if search_text in post.get('title', '').lower()
]
if matching_posts_for_creator:
- data_for_rebuild[creator_key] = matching_posts_for_creator
+ # Store the tuple back, with original creator_info and filtered posts
+ data_for_rebuild[creator_key] = (creator_data_tuple_part, matching_posts_for_creator)
- total_matching_posts = sum(len(posts) for posts in data_for_rebuild.values())
+ # Adjust for tuple structure
+ total_matching_posts = sum(len(posts_tuple[1]) for posts_tuple in data_for_rebuild.values())
if total_matching_posts > 0:
self.posts_area_title_label.setText(self._tr("fetched_posts_count_label_filtered", "Displaying {count} post(s) matching filter.").format(count=total_matching_posts))
else:
@@ -1310,39 +1369,83 @@ class EmptyPopupDialog (QDialog ):
self._rebuild_posts_list_widget(filtered_data_map=data_for_rebuild)
def _rebuild_posts_list_widget(self, filtered_data_map):
- self.posts_list_widget.blockSignals(True) # Block signals during repopulation
- self.posts_list_widget.clear()
+ self.posts_title_list_widget.blockSignals(True) # Block signals during repopulation
+ self.posts_date_list_widget.blockSignals(True)
+ self.posts_title_list_widget.clear()
+ self.posts_date_list_widget.clear() # Clear date list as well
data_to_display = filtered_data_map
if not data_to_display:
- self.posts_list_widget.blockSignals(False)
+ self.posts_title_list_widget.blockSignals(False) # Corrected widget name
+ self.posts_date_list_widget.blockSignals(False)
return
+ # Sort creator keys based on the name stored within the fetched data tuple
sorted_creator_keys = sorted(
data_to_display.keys(),
- key=lambda k: self.globally_selected_creators.get(k, {}).get('name', '').lower()
+ key=lambda k: data_to_display[k][0].get('name', '').lower() # data_to_display[k] is (creator_info, posts_list)
)
total_posts_shown = 0
for creator_key in sorted_creator_keys:
- creator_info_original = self.globally_selected_creators.get(creator_key)
- if not creator_info_original:
- continue
+ # Get creator_info and posts_for_this_creator from the stored tuple
+ creator_info_original, posts_for_this_creator = data_to_display.get(creator_key, (None, []))
- posts_for_this_creator = data_to_display.get(creator_key, [])
- if not posts_for_this_creator:
+ if not creator_info_original or not posts_for_this_creator: # Ensure both parts of tuple are valid
continue
-
+
creator_header_item = QListWidgetItem(f"--- {self._tr('posts_for_creator_header', 'Posts for')} {creator_info_original['name']} ({creator_info_original['service']}) ---")
font = creator_header_item.font()
font.setBold(True)
creator_header_item.setFont(font)
creator_header_item.setFlags(Qt.NoItemFlags)
- self.posts_list_widget.addItem(creator_header_item)
+ self.posts_title_list_widget.addItem(creator_header_item)
+ self.posts_date_list_widget.addItem(QListWidgetItem("")) # Add empty item to date list for spacing
for post in posts_for_this_creator:
post_title = post.get('title', self._tr('untitled_post_placeholder', 'Untitled Post'))
- item = QListWidgetItem(f" {post_title}")
+
+ # Add date prefix
+ date_prefix_str = "[No Date]" # Default
+ published_date_str = post.get('published')
+ added_date_str = post.get('added')
+
+ date_to_use_str = None
+ if published_date_str:
+ date_to_use_str = published_date_str
+ elif added_date_str:
+ date_to_use_str = added_date_str
+
+ if date_to_use_str:
+ try:
+ # Assuming date is in ISO format like YYYY-MM-DDTHH:MM:SS
+ formatted_date = date_to_use_str.split('T')[0]
+ date_prefix_str = f"[{formatted_date}]"
+ except Exception: # pylint: disable=bare-except
+ pass # Keep "[No Date]" if parsing fails
+
+ # Determine date string
+ date_display_str = "[No Date]" # Default
+ published_date_str = post.get('published')
+ added_date_str = post.get('added')
+
+ date_to_use_str = None
+ if published_date_str:
+ date_to_use_str = published_date_str
+ elif added_date_str:
+ date_to_use_str = added_date_str
+
+ if date_to_use_str:
+ try:
+ # Assuming date is in ISO format like YYYY-MM-DDTHH:MM:SS
+ formatted_date = date_to_use_str.split('T')[0]
+ date_display_str = f"[{formatted_date}]"
+ except Exception: # pylint: disable=bare-except
+ pass # Keep "[No Date]" if parsing fails
+
+ # Title item
+ title_item_text = f" {post_title}" # Display full title, QListWidget handles ellipsis
+ item = QListWidgetItem(title_item_text)
item.setFlags(item.flags() | Qt.ItemIsUserCheckable)
item.setCheckState(Qt.Unchecked)
item_data = {
@@ -1351,7 +1454,9 @@ class EmptyPopupDialog (QDialog ):
'service': creator_info_original['service'],
'user_id': creator_info_original['id'],
'creator_name': creator_info_original['name'],
- 'full_post_data': post
+ 'full_post_data': post,
+ 'date_display_str': date_display_str, # Store formatted date for easy access
+ 'published_date_for_sort': date_to_use_str # Store raw date for potential future sorting
}
item.setData(Qt.UserRole, item_data)
post_unique_key = (
@@ -1364,9 +1469,15 @@ class EmptyPopupDialog (QDialog ):
else:
item.setCheckState(Qt.Unchecked)
- self.posts_list_widget.addItem(item)
+ self.posts_title_list_widget.addItem(item)
total_posts_shown += 1
- self.posts_list_widget.blockSignals(False) # Unblock signals
+ # Date item (purely display)
+ date_item = QListWidgetItem(f" {date_display_str}")
+ date_item.setFlags(Qt.NoItemFlags) # Not selectable, not checkable
+ self.posts_date_list_widget.addItem(date_item)
+
+ self.posts_title_list_widget.blockSignals(False) # Unblock signals
+ self.posts_date_list_widget.blockSignals(False)
def _handle_fetch_error(self, creator_info, error_message):
creator_name = creator_info.get('name', 'Unknown Creator')
@@ -1386,16 +1497,17 @@ class EmptyPopupDialog (QDialog ):
else:
self.posts_area_title_label.setText(self._tr("failed_to_fetch_or_no_posts_label", "Failed to fetch posts or no posts found."))
self.posts_search_input.setVisible(False)
- elif not self.posts_list_widget.count() and not self.posts_search_input.text().strip():
+ elif not self.posts_title_list_widget.count() and not self.posts_search_input.text().strip():
self.posts_area_title_label.setText(self._tr("no_posts_found_for_selection", "No posts found for selected creator(s)."))
self.posts_search_input.setVisible(True)
else:
+ QTimer.singleShot(10, lambda: self.posts_content_splitter.setSizes([int(self.posts_content_splitter.width() * 0.7), int(self.posts_content_splitter.width() * 0.3)]))
self.posts_search_input.setVisible(True)
def _handle_posts_select_all(self):
- self.posts_list_widget.blockSignals(True)
- for i in range(self.posts_list_widget.count()):
- item = self.posts_list_widget.item(i)
+ self.posts_title_list_widget.blockSignals(True)
+ for i in range(self.posts_title_list_widget.count()):
+ item = self.posts_title_list_widget.item(i)
if item.flags() & Qt.ItemIsUserCheckable:
item.setCheckState(Qt.Checked)
@@ -1408,16 +1520,16 @@ class EmptyPopupDialog (QDialog ):
str(item_data['id'])
)
self.globally_selected_post_ids.add(post_unique_key)
- self.posts_list_widget.blockSignals(False)
+ self.posts_title_list_widget.blockSignals(False)
def _handle_posts_deselect_all(self):
- self.posts_list_widget.blockSignals(True)
- for i in range(self.posts_list_widget.count()):
- item = self.posts_list_widget.item(i)
+ self.posts_title_list_widget.blockSignals(True)
+ for i in range(self.posts_title_list_widget.count()):
+ item = self.posts_title_list_widget.item(i)
if item.flags() & Qt.ItemIsUserCheckable:
item.setCheckState(Qt.Unchecked)
self.globally_selected_post_ids.clear() # Deselect all means clear all global selections
- self.posts_list_widget.blockSignals(False)
+ self.posts_title_list_widget.blockSignals(False)
def _handle_post_item_check_changed(self, item):
if not item or not item.data(Qt.UserRole): # Ignore header items or invalid items
@@ -1447,14 +1559,17 @@ class EmptyPopupDialog (QDialog ):
post_data_found = None
creator_key_for_fetched_data = (service, user_id_str)
+ # Access posts from the tuple structure in self.fetched_posts_data
if creator_key_for_fetched_data in self.fetched_posts_data:
- for post_in_list in self.fetched_posts_data[creator_key_for_fetched_data]:
+ _unused_creator_info, posts_in_list_for_creator = self.fetched_posts_data[creator_key_for_fetched_data]
+ for post_in_list in posts_in_list_for_creator:
if str(post_in_list.get('id')) == post_id_str:
post_data_found = post_in_list
break
if post_data_found:
- creator_info_original = self.globally_selected_creators.get(creator_key_for_fetched_data)
+ # Get creator_info from the fetched_posts_data tuple
+ creator_info_original, _unused_posts = self.fetched_posts_data.get(creator_key_for_fetched_data, ({}, []))
creator_name = creator_info_original.get('name', 'Unknown Creator') if creator_info_original else 'Unknown Creator'
domain = self._get_domain_for_service(service)
@@ -1510,6 +1625,8 @@ class EmptyPopupDialog (QDialog ):
self.right_pane_widget.hide()
self.main_splitter.setSizes([self.width(), 0])
-        self.posts_list_widget.itemChanged.disconnect(self._handle_post_item_check_changed) # Disconnect
+ if hasattr(self, '_handle_post_item_check_changed'): # Check if connected before disconnecting
+ self.posts_title_list_widget.itemChanged.disconnect(self._handle_post_item_check_changed)
self.posts_search_input.setVisible(False)
self.posts_search_input.clear()
self.globally_selected_post_ids.clear()
@@ -1627,8 +1744,9 @@ class PostsFetcherThread(QThread):
use_cookie=use_cookie_param,
cookie_text=cookie_text_param,
selected_cookie_file=selected_cookie_file_param,
- app_base_dir=app_base_dir_param,
- cancellation_event=self.cancellation_flag # Pass the thread's own cancellation event
+ app_base_dir=app_base_dir_param, # corrected comma
+ manga_filename_style_for_sort_check=None, # PostsFetcherThread doesn't use manga mode settings for its own fetching
+ cancellation_event=self.cancellation_flag
)
for posts_batch in post_generator:
@@ -1669,6 +1787,8 @@ class CookieHelpDialog (QDialog ):
CHOICE_PROCEED_WITHOUT_COOKIES =1
CHOICE_CANCEL_DOWNLOAD =2
CHOICE_OK_INFO_ONLY =3
+ _is_scrolling_titles = False # For scroll synchronization
+ _is_scrolling_dates = False # For scroll synchronization
def __init__ (self ,parent_app ,parent =None ,offer_download_without_option =False ):
super ().__init__ (parent )
@@ -1755,6 +1875,193 @@ class CookieHelpDialog (QDialog ):
self .user_choice =self .CHOICE_OK_INFO_ONLY
self .accept ()
+class DownloadHistoryDialog(QDialog):
+ """Dialog to display download history."""
+ def __init__(self, last_3_downloaded_entries, first_processed_entries, parent_app, parent=None):
+ super().__init__(parent)
+ self.parent_app = parent_app
+ self.last_3_downloaded_entries = last_3_downloaded_entries
+ self.first_processed_entries = first_processed_entries
+ self.setModal(True)
+
+ app_icon = get_app_icon_object()
+ if not app_icon.isNull():
+ self.setWindowIcon(app_icon)
+
+ screen_height = QApplication.primaryScreen().availableGeometry().height() if QApplication.primaryScreen() else 768
+ scale_factor = screen_height / 768.0
+ base_min_w, base_min_h = 600, 450
+ # Increase width to accommodate two panes
+ scaled_min_w = int(base_min_w * 1.5 * scale_factor)
+ scaled_min_h = int(base_min_h * scale_factor)
+ self.setMinimumSize(scaled_min_w, scaled_min_h)
+
+ self.setWindowTitle(self._tr("download_history_dialog_title_combined", "Download History"))
+
+ # Main layout for the dialog will be a QVBoxLayout
+ dialog_layout = QVBoxLayout(self)
+ self.setLayout(dialog_layout)
+
+ # --- Splitter ---
+ self.main_splitter = QSplitter(Qt.Horizontal)
+ dialog_layout.addWidget(self.main_splitter)
+
+ # --- Left Pane (Last 3 Downloaded Files) ---
+ left_pane_widget = QWidget()
+ left_layout = QVBoxLayout(left_pane_widget)
+ left_header_label = QLabel(self._tr("history_last_downloaded_header", "Last 3 Files Downloaded:"))
+ left_header_label.setAlignment(Qt.AlignCenter)
+ left_layout.addWidget(left_header_label)
+
+ left_scroll_area = QScrollArea()
+ left_scroll_area.setWidgetResizable(True)
+ left_scroll_content_widget = QWidget()
+ left_scroll_layout = QVBoxLayout(left_scroll_content_widget)
+
+ if not self.last_3_downloaded_entries:
+ no_left_history_label = QLabel(self._tr("no_download_history_header", "No Downloads Yet"))
+ no_left_history_label.setAlignment(Qt.AlignCenter)
+ left_scroll_layout.addWidget(no_left_history_label)
+ else:
+ for entry in self.last_3_downloaded_entries:
+ group_box = QGroupBox(f"{self._tr('history_file_label', 'File:')} {entry.get('disk_filename', 'N/A')}")
+ group_layout = QVBoxLayout(group_box)
+ details_text = (
+                        f"{self._tr('history_from_post_label', 'From Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})<br>"
+                        f"{self._tr('history_creator_series_label', 'Creator/Series:')} {entry.get('creator_display_name', 'N/A')}<br>"
+                        f"{self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}<br>"
+                        f"{self._tr('history_file_downloaded_label', 'File Downloaded:')} {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_timestamp', 0)))}<br>"
+ f"{self._tr('history_saved_in_folder_label', 'Saved In Folder:')} {entry.get('download_path', 'N/A')}"
+ )
+ details_label = QLabel(details_text)
+ details_label.setWordWrap(True)
+ details_label.setTextFormat(Qt.RichText)
+ group_layout.addWidget(details_label)
+ left_scroll_layout.addWidget(group_box)
+ left_scroll_area.setWidget(left_scroll_content_widget)
+ left_layout.addWidget(left_scroll_area)
+ self.main_splitter.addWidget(left_pane_widget)
+
+ # --- Right Pane (First Processed Posts) ---
+ right_pane_widget = QWidget()
+ right_layout = QVBoxLayout(right_pane_widget)
+ right_header_label = QLabel(self._tr("first_files_processed_header", "First {count} Posts Processed This Session:").format(count=len(self.first_processed_entries)))
+ right_header_label.setAlignment(Qt.AlignCenter)
+ right_layout.addWidget(right_header_label)
+
+ right_scroll_area = QScrollArea()
+ right_scroll_area.setWidgetResizable(True)
+ right_scroll_content_widget = QWidget()
+ right_scroll_layout = QVBoxLayout(right_scroll_content_widget)
+
+ if not self.first_processed_entries:
+ no_right_history_label = QLabel(self._tr("no_processed_history_header", "No Posts Processed Yet"))
+ no_right_history_label.setAlignment(Qt.AlignCenter)
+ right_scroll_layout.addWidget(no_right_history_label)
+ else:
+ for entry in self.first_processed_entries:
+ # Using 'Post:' for the group title as it's more accurate for this section
+ group_box = QGroupBox(f"{self._tr('history_post_label', 'Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
+ group_layout = QVBoxLayout(group_box)
+ details_text = (
+                        f"{self._tr('history_creator_label', 'Creator:')} {entry.get('creator_name', 'N/A')}<br>"
+                        f"{self._tr('history_top_file_label', 'Top File:')} {entry.get('top_file_name', 'N/A')}<br>"
+                        f"{self._tr('history_num_files_label', 'Num Files in Post:')} {entry.get('num_files', 0)}<br>"
+                        f"{self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}<br>"
+                        f"{self._tr('history_processed_on_label', 'Processed On:')} {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_date_timestamp', 0)))}<br>"
+ f"{self._tr('history_saved_to_folder_label', 'Saved To Folder:')} {entry.get('download_location', 'N/A')}"
+ )
+ details_label = QLabel(details_text)
+ details_label.setWordWrap(True)
+ details_label.setTextFormat(Qt.RichText)
+ group_layout.addWidget(details_label)
+ right_scroll_layout.addWidget(group_box)
+ right_scroll_area.setWidget(right_scroll_content_widget)
+ right_layout.addWidget(right_scroll_area)
+ self.main_splitter.addWidget(right_pane_widget)
+
+ # Set initial splitter sizes (e.g., 50/50)
+ QTimer.singleShot(0, lambda: self.main_splitter.setSizes([self.width() // 2, self.width() // 2]))
+
+ # --- Bottom Button Layout ---
+ bottom_button_layout = QHBoxLayout()
+ self.save_history_button = QPushButton(self._tr("history_save_button_text", "Save History to .txt"))
+ self.save_history_button.clicked.connect(self._save_history_to_txt)
+ bottom_button_layout.addStretch(1) # Push to the right
+ bottom_button_layout.addWidget(self.save_history_button)
+ # Add this new layout to the main dialog layout
+ dialog_layout.addLayout(bottom_button_layout)
+
+ if self.parent_app and hasattr(self.parent_app, 'get_dark_theme') and self.parent_app.current_theme == "dark":
+ self.setStyleSheet(self.parent_app.get_dark_theme())
+
+ def _tr(self, key, default_text=""):
+ if callable(get_translation) and self.parent_app:
+ return get_translation(self.parent_app.current_selected_language, key, default_text)
+ return default_text
+
+ def _save_history_to_txt(self):
+ if not self.last_3_downloaded_entries and not self.first_processed_entries:
+ QMessageBox.information(self, self._tr("no_download_history_header", "No Downloads Yet"),
+ self._tr("history_nothing_to_save_message", "There is no history to save."))
+ return
+
+ main_download_dir = self.parent_app.dir_input.text().strip()
+ default_save_dir = ""
+ if main_download_dir and os.path.isdir(main_download_dir):
+ default_save_dir = main_download_dir
+ else:
+ fallback_dir = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
+ if fallback_dir and os.path.isdir(fallback_dir):
+ default_save_dir = fallback_dir
+ else:
+ default_save_dir = self.parent_app.app_base_dir
+
+ default_filepath = os.path.join(default_save_dir, "download_history.txt")
+
+ filepath, _ = QFileDialog.getSaveFileName(
+ self, self._tr("history_save_dialog_title", "Save Download History"),
+ default_filepath, "Text Files (*.txt);;All Files (*)"
+ )
+
+ if not filepath:
+ return
+
+ history_content = []
+ history_content.append(f"{self._tr('history_last_downloaded_header', 'Last 3 Files Downloaded:')}\n")
+ if self.last_3_downloaded_entries:
+ for entry in self.last_3_downloaded_entries:
+ history_content.append(f" {self._tr('history_file_label', 'File:')} {entry.get('disk_filename', 'N/A')}")
+ history_content.append(f" {self._tr('history_from_post_label', 'From Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
+ history_content.append(f" {self._tr('history_creator_series_label', 'Creator/Series:')} {entry.get('creator_display_name', 'N/A')}")
+ history_content.append(f" {self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}")
+ history_content.append(f" {self._tr('history_file_downloaded_label', 'File Downloaded:')} {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_timestamp', 0)))}")
+ history_content.append(f" {self._tr('history_saved_in_folder_label', 'Saved In Folder:')} {entry.get('download_path', 'N/A')}\n")
+ else:
+ history_content.append(f" ({self._tr('no_download_history_header', 'No Downloads Yet')})\n")
+
+ history_content.append(f"\n{self._tr('first_files_processed_header', 'First {count} Posts Processed This Session:').format(count=len(self.first_processed_entries))}\n")
+ if self.first_processed_entries:
+ for entry in self.first_processed_entries:
+ history_content.append(f" {self._tr('history_post_label', 'Post:')} {entry.get('post_title', 'N/A')} (ID: {entry.get('post_id', 'N/A')})")
+ history_content.append(f" {self._tr('history_creator_label', 'Creator:')} {entry.get('creator_name', 'N/A')}")
+ history_content.append(f" {self._tr('history_top_file_label', 'Top File:')} {entry.get('top_file_name', 'N/A')}")
+ history_content.append(f" {self._tr('history_num_files_label', 'Num Files in Post:')} {entry.get('num_files', 0)}")
+ history_content.append(f" {self._tr('history_post_uploaded_label', 'Post Uploaded:')} {entry.get('upload_date_str', 'N/A')}")
+ history_content.append(f" {self._tr('history_processed_on_label', 'Processed On:')} {time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(entry.get('download_date_timestamp', 0)))}")
+ history_content.append(f" {self._tr('history_saved_to_folder_label', 'Saved To Folder:')} {entry.get('download_location', 'N/A')}\n")
+ else:
+ history_content.append(f" ({self._tr('no_processed_history_header', 'No Posts Processed Yet')})\n")
+
+ try:
+ with open(filepath, 'w', encoding='utf-8') as f:
+ f.write("\n".join(history_content))
+ QMessageBox.information(self, self._tr("history_export_success_title", "History Export Successful"),
+ self._tr("history_export_success_message", "Successfully exported download history to:\n{filepath}").format(filepath=filepath))
+ except Exception as e:
+ QMessageBox.critical(self, self._tr("history_export_error_title", "History Export Error"),
+ self._tr("history_export_error_message", "Could not export download history: {error}").format(error=str(e)))
+
class KnownNamesFilterDialog (QDialog ):
"""A dialog to select names from Known.txt to add to the filter input."""
def __init__ (self ,known_names_list ,parent_app_ref ,parent =None ):
@@ -3259,6 +3566,8 @@ class DownloaderApp (QWidget ):
log_signal =pyqtSignal (str )
add_character_prompt_signal =pyqtSignal (str )
overall_progress_signal =pyqtSignal (int ,int )
+ file_successfully_downloaded_signal = pyqtSignal(dict) # For actually downloaded files
+ post_processed_for_history_signal = pyqtSignal(dict) # For history data from DownloadThread
finished_signal =pyqtSignal (int ,int ,bool ,list )
external_link_signal =pyqtSignal (str ,str ,str ,str ,str )
file_progress_signal =pyqtSignal (str ,object )
@@ -3282,6 +3591,18 @@ class DownloaderApp (QWidget ):
self .total_posts_to_process =0
self .dynamic_character_filter_holder =DynamicFilterHolder ()
self .processed_posts_count =0
+ self.creator_name_cache = {} # Initialize creator_name_cache
+ self.log_signal.emit(f"ℹ️ App base directory: {self.app_base_dir}")
+
+ # Persistent History Setup
+ app_data_path = QStandardPaths.writableLocation(QStandardPaths.AppDataLocation)
+ self.last_downloaded_files_details = deque(maxlen=3) # For the new left-pane history
+ if not app_data_path: # Fallback if AppDataLocation is not available
+ app_data_path = os.path.join(self.app_base_dir, "app_data")
+ self.persistent_history_file = os.path.join(app_data_path, CONFIG_ORGANIZATION_NAME, CONFIG_APP_NAME_MAIN, "download_history.json")
+ self.download_history_candidates = deque(maxlen=8)
+ self.log_signal.emit(f"ℹ️ Persistent history file path set to: {self.persistent_history_file}")
+ self.final_download_history_entries = []
self .favorite_download_queue =deque ()
self .is_processing_favorites_queue =False
self .download_counter =0
@@ -3291,6 +3612,7 @@ class DownloaderApp (QWidget ):
self .is_fetcher_thread_running =False
self ._restart_pending =False
self .is_processing_favorites_queue =False
+ self.download_history_log = deque(maxlen=50) # For storing recent download history
self .skip_counter =0
self .all_kept_original_filenames =[]
self .cancellation_message_logged_this_session =False
@@ -3415,6 +3737,8 @@ class DownloaderApp (QWidget ):
self .log_signal .emit (f"ℹ️ Scan post content for images defaults to: {'Enabled'if self .scan_content_images_setting else 'Disabled'}")
self .log_signal .emit (f"ℹ️ Application language loaded: '{self .current_selected_language .upper ()}' (UI may not reflect this yet).")
self ._retranslate_main_ui ()
+ self._load_persistent_history() # Load history after UI is mostly set up
+
def _tr (self ,key ,default_text =""):
"""Helper to get translation based on current app language for the main window."""
@@ -3422,6 +3746,16 @@ class DownloaderApp (QWidget ):
return get_translation (self .current_selected_language ,key ,default_text )
return default_text
+ def _initialize_persistent_history_path(self):
+ documents_path = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
+ if not documents_path: # Fallback if DocumentsLocation is not available
+ self.log_signal.emit("⚠️ DocumentsLocation not found. Falling back to app base directory for history.")
+ documents_path = self.app_base_dir # Fallback to app's base directory
+
+ history_folder_name = "history" # User wants a folder named "history"
+ self.persistent_history_file = os.path.join(documents_path, history_folder_name, "download_history.json")
+ self.log_signal.emit(f"ℹ️ Persistent history file path set to: {self.persistent_history_file}")
+
def _retranslate_main_ui (self ):
"""Retranslates static text elements in the main UI."""
if self .url_label_widget :
@@ -3591,6 +3925,7 @@ class DownloaderApp (QWidget ):
self .actual_gui_signals .file_progress_signal .connect (self .update_file_progress_display )
self .actual_gui_signals .missed_character_post_signal .connect (self .handle_missed_character_post )
self .actual_gui_signals .external_link_signal .connect (self .handle_external_link_signal )
+ self .actual_gui_signals .file_successfully_downloaded_signal.connect(self._handle_actual_file_downloaded) # Connect new signal
self .actual_gui_signals .file_download_status_signal .connect (lambda status :None )
if hasattr (self ,'character_input'):
@@ -3611,6 +3946,7 @@ class DownloaderApp (QWidget ):
self .add_character_prompt_signal .connect (self .prompt_add_character )
self .character_prompt_response_signal .connect (self .receive_add_character_result )
self .overall_progress_signal .connect (self .update_progress_display )
+ self.post_processed_for_history_signal.connect(self._add_to_history_candidates) # Connect new signal
self .finished_signal .connect (self .download_finished )
if hasattr (self ,'character_search_input'):self .character_search_input .textChanged .connect (self .filter_character_list )
if hasattr (self ,'external_links_checkbox'):self .external_links_checkbox .toggled .connect (self .update_external_links_setting )
@@ -3667,6 +4003,8 @@ class DownloaderApp (QWidget ):
self .favorite_mode_posts_button .clicked .connect (self ._show_favorite_posts_dialog )
if hasattr (self ,'favorite_scope_toggle_button'):
self .favorite_scope_toggle_button .clicked .connect (self ._cycle_favorite_scope )
+ if hasattr(self, 'history_button'): # Connect history button
+ self.history_button.clicked.connect(self._show_download_history_dialog)
if hasattr (self ,'error_btn'):
self .error_btn .clicked .connect (self ._show_error_files_dialog )
@@ -3749,6 +4087,10 @@ class DownloaderApp (QWidget ):
self .actual_gui_signals .file_progress_signal .emit (*payload )
elif signal_type =='missed_character_post':
self .actual_gui_signals .missed_character_post_signal .emit (*payload )
+            elif signal_type == 'file_successfully_downloaded': # Handle new signal type from queue; payload is (history_entry_dict,)
+                self._handle_actual_file_downloaded(payload[0] if payload else {})
else :
self .log_signal .emit (f"⚠️ Unknown signal type from worker queue: {signal_type }")
self .worker_to_gui_queue .task_done ()
@@ -3840,6 +4182,7 @@ class DownloaderApp (QWidget ):
self .settings .setValue (THEME_KEY ,self .current_theme )
self .settings .setValue (LANGUAGE_KEY ,self .current_selected_language )
self .settings .sync ()
+ self._save_persistent_history() # Ensure history is saved on close
should_exit =True
is_downloading =self ._is_download_active ()
@@ -4292,6 +4635,11 @@ class DownloaderApp (QWidget ):
self .known_names_help_button .setStyleSheet ("padding: 4px 6px;")
self .known_names_help_button .clicked .connect (self ._show_feature_guide )
+ self .history_button =QPushButton ("📜") # History emoji
+ self .history_button .setFixedWidth (35 )
+ self .history_button .setStyleSheet ("padding: 4px 6px;")
+ self .history_button .setToolTip (self ._tr ("history_button_tooltip_text","View download history (Not Implemented Yet)"))
+
self .future_settings_button =QPushButton ("⚙️")
self .future_settings_button .setFixedWidth (35 )
self .future_settings_button .setStyleSheet ("padding: 4px 6px;")
@@ -4299,6 +4647,7 @@ class DownloaderApp (QWidget ):
char_manage_layout .addWidget (self .add_to_filter_button ,1 )
char_manage_layout .addWidget (self .delete_char_button ,1 )
char_manage_layout .addWidget (self .known_names_help_button ,0 )
+ char_manage_layout .addWidget (self .history_button ,0 ) # Add the new history button
char_manage_layout .addWidget (self .future_settings_button ,0 )
left_layout .addLayout (char_manage_layout )
left_layout .addStretch (0 )
@@ -4430,6 +4779,8 @@ class DownloaderApp (QWidget ):
if self .manga_mode_checkbox :
self .update_ui_for_manga_mode (self .manga_mode_checkbox .isChecked ())
if hasattr (self ,'link_input'):self .link_input .textChanged .connect (lambda :self .update_ui_for_manga_mode (self .manga_mode_checkbox .isChecked ()if self .manga_mode_checkbox else False ))
+
+ self._load_creator_name_cache_from_json() # Load creator names for history and other features
self .load_known_names_from_util ()
self ._update_cookie_input_visibility (self .use_cookie_checkbox .isChecked ()if hasattr (self ,'use_cookie_checkbox')else False )
self ._handle_multithreading_toggle (self .use_multithreading_checkbox .isChecked ())
@@ -4442,6 +4793,149 @@ class DownloaderApp (QWidget ):
if hasattr (self ,'download_thumbnails_checkbox'):
self ._handle_thumbnail_mode_change (self .download_thumbnails_checkbox .isChecked ())
if hasattr (self ,'favorite_mode_checkbox'):
+
+ self._handle_favorite_mode_toggle(False) # Ensure UI is in non-favorite state after reset
+
+ def _load_persistent_history(self):
+ """Loads download history from a persistent file."""
+ self._initialize_persistent_history_path() # Ensure path is set before loading
+ file_existed_before_load = os.path.exists(self.persistent_history_file)
+ self.log_signal.emit(f"📜 Attempting to load history from: {self.persistent_history_file}")
+ if os.path.exists(self.persistent_history_file):
+ try:
+ with open(self.persistent_history_file, 'r', encoding='utf-8') as f:
+ loaded_history = json.load(f)
+ if isinstance(loaded_history, list):
+ self.final_download_history_entries = loaded_history
+ self.log_signal.emit(f"✅ Loaded {len(loaded_history)} entries from persistent download history: {self.persistent_history_file}")
+ elif loaded_history is None and os.path.getsize(self.persistent_history_file) == 0: # Handle empty file
+ self.log_signal.emit(f"ℹ️ Persistent history file is empty. Initializing with empty history.")
+ self.final_download_history_entries = []
+ else:
+ self.log_signal.emit(f"⚠️ Persistent history file has incorrect format. Expected list, got {type(loaded_history)}. Ignoring.")
+ self.final_download_history_entries = []
+ except json.JSONDecodeError:
+ self.log_signal.emit(f"⚠️ Error decoding persistent history file. It might be corrupted. Ignoring.")
+ self.final_download_history_entries = []
+ except Exception as e:
+ self.log_signal.emit(f"❌ Error loading persistent history: {e}")
+ self.final_download_history_entries = []
+ else:
+ self.log_signal.emit(f"⚠️ Persistent history file NOT FOUND at: {self.persistent_history_file}. Starting with empty history.")
+ self.final_download_history_entries = [] # Initialize to empty if not found
+ self._save_persistent_history() # Attempt to create the directory and an empty history file now
+
+ def _save_persistent_history(self):
+ """Saves download history to a persistent file."""
+ if not hasattr(self, 'persistent_history_file') or not self.persistent_history_file:
+ self._initialize_persistent_history_path() # Ensure path is set before saving
+ self.log_signal.emit(f"📜 Attempting to save history to: {self.persistent_history_file}")
+ try:
+ history_dir = os.path.dirname(self.persistent_history_file)
+ self.log_signal.emit(f" History directory: {history_dir}")
+ if not os.path.exists(history_dir):
+ os.makedirs(history_dir, exist_ok=True)
+ self.log_signal.emit(f" Created history directory: {history_dir}")
+
+ with open(self.persistent_history_file, 'w', encoding='utf-8')as f:
+ json.dump(self.final_download_history_entries, f, indent=2)
+ self.log_signal.emit(f"✅ Saved {len(self.final_download_history_entries)} history entries to: {self.persistent_history_file}")
+ except Exception as e:
+ self.log_signal.emit(f"❌ Error saving persistent history to {self.persistent_history_file}: {e}")
+ def _load_creator_name_cache_from_json(self):
+ """Loads creator id-name-service mappings from creators.json into self.creator_name_cache."""
+ self.log_signal.emit("ℹ️ Attempting to load creators.json for creator name cache.")
+
+ if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
+ base_path_for_creators = sys._MEIPASS
+ else:
+ base_path_for_creators = self.app_base_dir
+
+ creators_file_path = os.path.join(base_path_for_creators, "creators.json")
+
+ if not os.path.exists(creators_file_path):
+ self.log_signal.emit(f"⚠️ 'creators.json' not found at {creators_file_path}. Creator name cache will be empty.")
+ self.creator_name_cache.clear()
+ return
+
+ try:
+ with open(creators_file_path, 'r', encoding='utf-8') as f:
+ loaded_data = json.load(f)
+
+ creators_list = []
+ if isinstance(loaded_data, list) and len(loaded_data) > 0 and isinstance(loaded_data[0], list):
+ creators_list = loaded_data[0]
+ elif isinstance(loaded_data, list) and all(isinstance(item, dict) for item in loaded_data):
+ creators_list = loaded_data
+ else:
+ self.log_signal.emit(f"⚠️ 'creators.json' has an unexpected format. Creator name cache may be incomplete.")
+
+ for creator_data in creators_list:
+ creator_id = creator_data.get("id")
+ name = creator_data.get("name")
+ service = creator_data.get("service")
+ if creator_id and name and service:
+ self.creator_name_cache[(service.lower(), str(creator_id))] = name
+ self.log_signal.emit(f"✅ Successfully loaded {len(self.creator_name_cache)} creator names into cache from 'creators.json'.")
+ except Exception as e:
+ self.log_signal.emit(f"❌ Error loading 'creators.json' for name cache: {e}")
+ self.creator_name_cache.clear()
+
+ def _show_download_history_dialog(self):
+ """Shows the dialog with the finalized download history."""
+ last_3_downloaded = list(self.last_downloaded_files_details)
+ first_processed = self.final_download_history_entries
+
+ if not last_3_downloaded and not first_processed:
+ QMessageBox.information(
+ self,
+ self._tr("download_history_dialog_title_empty", "Download History (Empty)"),
+ self._tr("no_download_history_header", "No Downloads Yet")
+ )
+ return
+
+ dialog = DownloadHistoryDialog(last_3_downloaded, first_processed, self, self)
+ dialog.exec_()
+
+ def _handle_actual_file_downloaded(self, file_details_dict):
+ """Handles a successfully downloaded file for the 'last 3 downloaded' history."""
+ if not file_details_dict:
+ return
+ file_details_dict['download_timestamp'] = time.time() # Ensure timestamp is set
+ creator_key = (file_details_dict.get('service', '').lower(), str(file_details_dict.get('user_id', '')))
+ file_details_dict['creator_display_name'] = self.creator_name_cache.get(creator_key, file_details_dict.get('folder_context_name', 'Unknown Creator/Series'))
+ self.last_downloaded_files_details.append(file_details_dict)
+ # self.log_signal.emit(f"💾 Recorded successful download for history: {file_details_dict.get('disk_filename', 'N/A')}")
+
+ def _handle_file_successfully_downloaded(self, history_entry_dict):
+ """Handles a successfully downloaded file for history logging."""
+ if len(self.download_history_log) >= self.download_history_log.maxlen:
+ self.download_history_log.popleft() # Remove oldest if full
+ self.download_history_log.append(history_entry_dict)
+ # self.log_signal.emit(f"📜 Added to history log: {history_entry_dict.get('post_title', 'N/A')}")
+
+ def _handle_actual_file_downloaded(self, file_details_dict):
+ """Handles a successfully downloaded file for the 'last 3 downloaded' history."""
+ if not file_details_dict:
+ return
+
+ file_details_dict['download_timestamp'] = time.time() # Ensure timestamp is set
+
+ # Resolve creator name for display
+ creator_key = (
+ file_details_dict.get('service', '').lower(),
+ str(file_details_dict.get('user_id', ''))
+ )
+ creator_display_name = self.creator_name_cache.get(creator_key, file_details_dict.get('folder_context_name', 'Unknown Creator'))
+ file_details_dict['creator_display_name'] = creator_display_name
+
+ self.last_downloaded_files_details.append(file_details_dict)
+ # self.log_signal.emit(f"💾 Recorded successful download for history: {file_details_dict.get('disk_filename', 'N/A')}")
+
+ def _handle_favorite_mode_toggle (self ,checked ):
+ if not self .url_or_placeholder_stack or not self .bottom_action_buttons_stack :
+ return
+
self ._handle_favorite_mode_toggle (self .favorite_mode_checkbox .isChecked ())
self ._update_favorite_scope_button_text ()
if hasattr (self ,'link_input'):
@@ -5555,6 +6049,10 @@ class DownloaderApp (QWidget ):
elif self .manga_filename_style ==STYLE_DATE_BASED :
self .manga_rename_toggle_button .setText (self ._tr ("manga_style_date_based_text","Name: Date Based"))
+
+ elif self.manga_filename_style == STYLE_DATE_POST_TITLE: # New style
+ self.manga_rename_toggle_button.setText(self._tr("manga_style_date_post_title_text", "Name: Date + Title")) # Key from languages.py
+
else :
self .manga_rename_toggle_button .setText (self ._tr ("manga_style_unknown_text","Name: Unknown Style"))
@@ -5568,10 +6066,12 @@ class DownloaderApp (QWidget ):
if current_style ==STYLE_POST_TITLE :
new_style =STYLE_ORIGINAL_NAME
elif current_style ==STYLE_ORIGINAL_NAME :
+ new_style =STYLE_DATE_POST_TITLE # Cycle to new style
+ elif current_style == STYLE_DATE_POST_TITLE: # New style in cycle
new_style =STYLE_POST_TITLE_GLOBAL_NUMBERING
elif current_style ==STYLE_POST_TITLE_GLOBAL_NUMBERING :
new_style =STYLE_DATE_BASED
- elif current_style ==STYLE_DATE_BASED :
+ elif current_style == STYLE_DATE_BASED: # Last style in old cycle
new_style =STYLE_POST_TITLE
else :
self .log_signal .emit (f"⚠️ Unknown current manga filename style: {current_style }. Resetting to default ('{STYLE_POST_TITLE }').")
@@ -5671,7 +6171,8 @@ class DownloaderApp (QWidget ):
show_date_prefix_input =(
manga_mode_effectively_on and
- (current_filename_style ==STYLE_DATE_BASED or current_filename_style ==STYLE_ORIGINAL_NAME )and
+ (current_filename_style == STYLE_DATE_BASED or
+ current_filename_style == STYLE_ORIGINAL_NAME) and # Prefix input not for Date+Title
not (is_only_links_mode or is_only_archives_mode or is_only_audio_mode )
)
if hasattr (self ,'manga_date_prefix_input'):
@@ -5780,6 +6281,8 @@ class DownloaderApp (QWidget ):
# 1. direct_api_url was provided (e.g., recursive call from _process_next_favorite_download)
# 2. The favorite_download_queue was empty or already being processed, so we fall back to link_input.
api_url = direct_api_url if direct_api_url else self.link_input.text().strip()
+ self.download_history_candidates.clear() # Clear candidates buffer for new download session
+ # self.final_download_history_entries.clear() # DO NOT CLEAR HERE - loaded history should persist until a new download successfully finalizes new history
if self.favorite_mode_checkbox and self.favorite_mode_checkbox.isChecked() and not direct_api_url and not api_url: # Check api_url here too
QMessageBox.information(self, "Favorite Mode Active",
@@ -6532,6 +7035,11 @@ class DownloaderApp (QWidget ):
if hasattr (self .download_thread ,'missed_character_post_signal'):
self .download_thread .missed_character_post_signal .connect (self .handle_missed_character_post )
if hasattr (self .download_thread ,'retryable_file_failed_signal'):
+ # Connect the new history signal from DownloadThread
+ if hasattr(self.download_thread, 'file_successfully_downloaded_signal'): # Connect new signal for actual downloads
+ self.download_thread.file_successfully_downloaded_signal.connect(self._handle_actual_file_downloaded)
+ if hasattr(self.download_thread, 'post_processed_for_history_signal'): # Check if signal exists
+ self.download_thread.post_processed_for_history_signal.connect(self._add_to_history_candidates)
self .download_thread .retryable_file_failed_signal .connect (self ._handle_retryable_file_failure )
if hasattr(self.download_thread, 'permanent_file_failed_signal'): # Ensure this signal exists on BackendDownloadThread
self.download_thread.permanent_file_failed_signal.connect(self._handle_permanent_file_failure_from_thread)
@@ -6655,7 +7163,17 @@ class DownloaderApp (QWidget ):
start_page =worker_args_template .get ('start_page'),
end_page =worker_args_template .get ('end_page'),
manga_mode =manga_mode_active_for_fetch ,
- cancellation_event =self .cancellation_event
+ cancellation_event=self.cancellation_event,
+ pause_event=worker_args_template.get('pause_event'),
+ use_cookie=worker_args_template.get('use_cookie'),
+ cookie_text=worker_args_template.get('cookie_text'),
+ selected_cookie_file=worker_args_template.get('selected_cookie_file'),
+ app_base_dir=worker_args_template.get('app_base_dir'),
+ manga_filename_style_for_sort_check=(
+ worker_args_template.get('manga_filename_style')
+ if manga_mode_active_for_fetch
+ else None
+ )
)
for posts_batch in post_generator :
@@ -6831,14 +7349,16 @@ class DownloaderApp (QWidget ):
elif future .exception ():
self .log_signal .emit (f"❌ Post processing worker error: {future .exception ()}")
else :
- downloaded_files_from_future ,skipped_files_from_future ,kept_originals_from_future ,retryable_failures_from_post ,permanent_failures_from_post =future .result ()
- if retryable_failures_from_post :
- self .retryable_failed_files_info .extend (retryable_failures_from_post )
- if permanent_failures_from_post :
- self .permanently_failed_files_for_dialog .extend (permanent_failures_from_post )
+ # unpack the new history_data from the future's result
+ result_tuple = future.result()
+ downloaded_files_from_future, skipped_files_from_future, \
+ kept_originals_from_future, retryable_failures_from_post, \
+ permanent_failures_from_post, history_data_from_worker = result_tuple
+ if history_data_from_worker: # if worker returned history data
+ self._add_to_history_candidates(history_data_from_worker)
with self .downloaded_files_lock :
self .download_counter +=downloaded_files_from_future
- self .skip_counter +=skipped_files_from_future
+ self .skip_counter +=skipped_files_from_future # type: ignore
if kept_originals_from_future :
self .all_kept_original_filenames .extend (kept_originals_from_future )
@@ -6854,6 +7374,52 @@ class DownloaderApp (QWidget ):
QApplication .processEvents ()
self .log_signal .emit ("🏁 All submitted post tasks have completed or failed.")
self .finished_signal .emit (self .download_counter ,self .skip_counter ,self .cancellation_event .is_set (),self .all_kept_original_filenames )
+
+ def _add_to_history_candidates(self, history_data):
+ """Adds processed post data to the history candidates list."""
+ if history_data and len(self.download_history_candidates) < 8:
+ history_data['download_date_timestamp'] = time.time()
+ creator_key = (history_data.get('service', '').lower(), str(history_data.get('user_id', '')))
+ history_data['creator_name'] = self.creator_name_cache.get(creator_key, history_data.get('user_id', 'Unknown'))
+ self.download_history_candidates.append(history_data)
+
+ def _finalize_download_history(self):
+ """Processes candidates and selects the final 3 history entries.
+ Only updates final_download_history_entries if new candidates are available.
+ """
+ if not self.download_history_candidates:
+ # No new candidates from this session, so don't touch existing
+ # final_download_history_entries (which might be from a previous session).
+ self.log_signal.emit("ℹ️ No new history candidates from this session. Preserving existing history.")
+ # It's important to clear the candidates buffer for the next session,
+ # even if we don't use them this time.
+ self.download_history_candidates.clear()
+ return
+
+ candidates = list(self.download_history_candidates)
+ now = datetime.datetime.now(datetime.timezone.utc) # Use timezone-aware now
+
+ def get_sort_key(entry):
+ upload_date_str = entry.get('upload_date_str')
+ if not upload_date_str:
+ return datetime.timedelta.max # Push entries with no date to the end
+ try:
+ # Attempt to parse ISO format, make it offset-naive if necessary
+ upload_dt = datetime.datetime.fromisoformat(upload_date_str.replace('Z', '+00:00'))
+ if upload_dt.tzinfo is None: # If still naive, assume UTC
+ upload_dt = upload_dt.replace(tzinfo=datetime.timezone.utc)
+ return abs(now - upload_dt)
+ except ValueError:
+ return datetime.timedelta.max # Push unparseable dates to the end
+
+ candidates.sort(key=get_sort_key)
+ self.final_download_history_entries = candidates[:3]
+ self.log_signal.emit(f"ℹ️ Finalized download history: {len(self.final_download_history_entries)} entries selected.")
+ self.download_history_candidates.clear() # Clear candidates after processing
+
+ # Always save the current state of final_download_history_entries
+ self._save_persistent_history()
+
def _get_configurable_widgets_on_pause (self ):
"""Returns a list of widgets that should be re-enabled when paused."""
return [
@@ -7145,6 +7711,7 @@ class DownloaderApp (QWidget ):
if kept_original_names_list is None :
kept_original_names_list =[]
+ self._finalize_download_history() # Finalize history before UI updates
status_message =self ._tr ("status_cancelled_by_user","Cancelled by user")if cancelled_by_user else self ._tr ("status_completed","Completed")
if cancelled_by_user and self .retryable_failed_files_info :
self .log_signal .emit (f" Download cancelled, discarding {len (self .retryable_failed_files_info )} file(s) that were pending retry.")
@@ -7183,6 +7750,10 @@ class DownloaderApp (QWidget ):
self .download_thread .missed_character_post_signal .disconnect (self .handle_missed_character_post )
if hasattr (self .download_thread ,'retryable_file_failed_signal'):
self .download_thread .retryable_file_failed_signal .disconnect (self ._handle_retryable_file_failure )
+ if hasattr(self.download_thread, 'file_successfully_downloaded_signal'): # Disconnect new signal
+ self.download_thread.file_successfully_downloaded_signal.disconnect(self._handle_actual_file_downloaded)
+ if hasattr(self.download_thread, 'post_processed_for_history_signal'): # Disconnect new signal
+ self.download_thread.post_processed_for_history_signal.disconnect(self._add_to_history_candidates)
except (TypeError ,RuntimeError )as e :
self .log_signal .emit (f"ℹ️ Note during single-thread signal disconnection: {e }")
@@ -7569,6 +8140,8 @@ class DownloaderApp (QWidget ):
("help_guide_step7_title","help_guide_step7_content"),
("help_guide_step8_title","help_guide_step8_content"),
("help_guide_step9_title","help_guide_step9_content"),
+ ("column_header_post_title", "Post Title"), # For EmptyPopupDialog
+ ("column_header_date_uploaded", "Date Uploaded"), # For EmptyPopupDialog
]
steps =[
@@ -7925,16 +8498,43 @@ if __name__ =='__main__':
import sys
import os
import time
- def log_error_to_file (exc_info_tuple ):
- log_file_path =os .path .join (os .path .dirname (sys .executable )if getattr (sys ,'frozen',False )else os .path .dirname (__file__ ),"critical_error_log.txt")
- with open (log_file_path ,"a",encoding ="utf-8")as f :
- f .write (f"Timestamp: {time .strftime ('%Y-%m-%d %H:%M:%S')}\n")
- traceback .print_exception (*exc_info_tuple ,file =f )
- f .write ("-"*80 +"\n\n")
+
+ def handle_uncaught_exception(exc_type, exc_value, exc_traceback):
+ """Handles uncaught exceptions by logging them to a file."""
+ # Determine base_dir for logs
+ if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
+ # PyInstaller-like bundle
+ base_dir_for_log = sys._MEIPASS
+ else:
+ # Running as a script
+ base_dir_for_log = os.path.dirname(os.path.abspath(__file__))
+
+ log_dir = os.path.join(base_dir_for_log, "logs")
+ log_file_path = os.path.join(log_dir, "uncaught_exceptions.log")
+
+ try:
+ os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
+ with open(log_file_path, "a", encoding="utf-8") as f:
+ f.write(f"Timestamp: {time.strftime('%Y-%m-%d %H:%M:%S')}\n")
+ traceback.print_exception(exc_type, exc_value, exc_traceback, file=f)
+ f.write("-" * 80 + "\n\n")
+ except Exception as log_ex:
+ # If logging itself fails, print to stderr
+ print(f"CRITICAL: Failed to write to uncaught_exceptions.log: {log_ex}", file=sys.stderr)
+ traceback.print_exception(exc_type, exc_value, exc_traceback, file=sys.stderr) # Log original exception to stderr
+ sys.__excepthook__(exc_type, exc_value, exc_traceback) # Call the default excepthook
+
+ sys.excepthook = handle_uncaught_exception # Set the custom excepthook
+
try :
qt_app =QApplication (sys .argv )
- if getattr (sys ,'frozen',False ):base_dir =sys ._MEIPASS
- else :base_dir =os .path .dirname (os .path .abspath (__file__ ))
+ # Set these after QApplication is initialized and before they might be needed
+ QCoreApplication.setOrganizationName(CONFIG_ORGANIZATION_NAME)
+ QCoreApplication.setApplicationName(CONFIG_APP_NAME_MAIN) # Using the same name as for QSettings path part
+ if getattr (sys ,'frozen',False ) and hasattr(sys, '_MEIPASS'): # Check for _MEIPASS for PyInstaller
+ base_dir =sys ._MEIPASS
+ else: # This 'else' now correctly follows its 'if'
+ base_dir =os .path .dirname (os .path .abspath (__file__ ))
icon_path =os .path .join (base_dir , 'assets', 'Kemono.ico')
if os .path .exists (icon_path ):qt_app .setWindowIcon (QIcon (icon_path ))
else :print (f"Warning: Application icon 'assets/Kemono.ico' not found at {icon_path }")