This commit is contained in:
Yuvi9587 2025-05-22 07:03:05 +05:30
parent a6383b20a4
commit fd84de7bce
5 changed files with 363 additions and 50 deletions

View File

@ -0,0 +1 @@
([Yor], Yor Briar, Yor Forger)

View File

@ -63,6 +63,49 @@ ARCHIVE_EXTENSIONS = {
'.zip', '.rar', '.7z', '.tar', '.gz', '.bz2'
}
# --- Cookie Helper Functions ---
def parse_cookie_string(cookie_string):
    """Parses a 'name=value; name2=value2' cookie string into a dict."""
    if not cookie_string:
        return None
    parsed = {}
    for chunk in cookie_string.split(';'):
        # partition() splits on the first '=' only, so values may contain '='.
        name, sep, value = chunk.partition('=')
        name = name.strip()
        if sep and name:  # require an '=' and a non-empty cookie name
            parsed[name] = value.strip()
    # Callers treat "no cookies" as None rather than an empty dict.
    return parsed or None
def load_cookies_from_netscape_file(filepath, logger_func):
    """Loads cookies from a Netscape-formatted cookies.txt file.

    Returns a {name: value} dict, or None if the file is missing,
    unreadable, or contains no usable cookie entries.
    """
    cookies = {}
    try:
        with open(filepath, 'r', encoding='utf-8') as f:
            for line_num, line in enumerate(f, 1):
                line = line.strip()
                if not line:
                    continue
                # curl/browser exports prefix HttpOnly cookies' domain field
                # with '#HttpOnly_'. These are real cookie entries, not
                # comments, so strip the prefix instead of discarding them.
                if line.startswith('#HttpOnly_'):
                    line = line[len('#HttpOnly_'):]
                elif line.startswith('#'):
                    continue  # genuine comment line
                parts = line.split('\t')
                if len(parts) == 7:
                    # Netscape format: domain, flag, path, secure, expiration, name, value
                    name = parts[5]
                    value = parts[6]
                    if name:  # Ensure name is not empty
                        cookies[name] = value
                # else: malformed line (not 7 tab-separated fields) is
                # silently ignored; line_num is available if debug logging
                # is ever re-enabled here.
        logger_func(f" 🍪 Loaded {len(cookies)} cookies from '{os.path.basename(filepath)}'.")
        return cookies if cookies else None
    except FileNotFoundError:
        logger_func(f" 🍪 Cookie file '{os.path.basename(filepath)}' not found at expected location.")
        return None
    except Exception as e:
        logger_func(f" 🍪 Error parsing cookie file '{os.path.basename(filepath)}': {e}")
        return None
# --- End Cookie Helper Functions ---
def is_title_match_for_character(post_title, character_name_filter):
if not post_title or not character_name_filter:
return False
@ -231,12 +274,31 @@ def extract_post_info(url_string):
return None, None, None
def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_event=None, pause_event=None):
def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_cookie_file_path, app_base_dir, logger_func):
    """Prepares a cookie dictionary from text input or cookies.txt file."""
    # Cookie usage disabled entirely: nothing to prepare.
    if not use_cookie_flag:
        return None
    # Priority 1: an explicit cookie string typed into the UI.
    if cookie_text_input:
        logger_func(" 🍪 Using cookies from UI text input.")
        return parse_cookie_string(cookie_text_input)
    # Priority 2: a cookie file the user browsed to.
    if selected_cookie_file_path:
        logger_func(f" 🍪 Attempting to load cookies from selected file: '{os.path.basename(selected_cookie_file_path)}'...")
        return load_cookies_from_netscape_file(selected_cookie_file_path, logger_func)
    # Priority 3: fall back to a default cookies.txt in the app directory.
    if app_base_dir:
        cookies_filepath = os.path.join(app_base_dir, "cookies.txt")
        logger_func(f" 🍪 No UI text or specific file selected. Attempting to load default '{os.path.basename(cookies_filepath)}' from app directory...")
        return load_cookies_from_netscape_file(cookies_filepath, logger_func)
    # No source of cookies available at all.
    logger_func(" 🍪 Cookie usage enabled, but no text input, specific file, or app base directory provided for cookies.txt.")
    return None
def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
if cancellation_event and cancellation_event.is_set():
logger(" Fetch cancelled before request.")
raise RuntimeError("Fetch operation cancelled by user.")
if pause_event and pause_event.is_set():
if pause_event and pause_event.is_set(): # type: ignore
logger(" Post fetching paused...")
while pause_event.is_set():
if cancellation_event and cancellation_event.is_set():
@ -248,7 +310,7 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
paginated_url = f'{api_url_base}?o={offset}'
logger(f" Fetching: {paginated_url} (Page approx. {offset // 50 + 1})")
try:
response = requests.get(paginated_url, headers=headers, timeout=(10, 60))
response = requests.get(paginated_url, headers=headers, timeout=(10, 60), cookies=cookies_dict)
response.raise_for_status()
if 'application/json' not in response.headers.get('Content-Type', '').lower():
logger(f"⚠️ Unexpected content type from API: {response.headers.get('Content-Type')}. Body: {response.text[:200]}")
@ -266,12 +328,12 @@ def fetch_posts_paginated(api_url_base, headers, offset, logger, cancellation_ev
except Exception as e:
raise RuntimeError(f"Unexpected error fetching offset {offset} ({paginated_url}): {e}")
def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger, cancellation_event=None, pause_event=None):
def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger, cancellation_event=None, pause_event=None, cookies_dict=None):
if cancellation_event and cancellation_event.is_set():
logger(" Comment fetch cancelled before request.")
raise RuntimeError("Comment fetch operation cancelled by user.")
if pause_event and pause_event.is_set():
if pause_event and pause_event.is_set(): # type: ignore
logger(" Comment fetching paused...")
while pause_event.is_set():
if cancellation_event and cancellation_event.is_set():
@ -283,7 +345,7 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
comments_api_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}/post/{post_id}/comments"
logger(f" Fetching comments: {comments_api_url}")
try:
response = requests.get(comments_api_url, headers=headers, timeout=(10, 30)) # Shorter timeout for comments
response = requests.get(comments_api_url, headers=headers, timeout=(10, 30), cookies=cookies_dict)
response.raise_for_status()
if 'application/json' not in response.headers.get('Content-Type', '').lower():
logger(f"⚠️ Unexpected content type from comments API: {response.headers.get('Content-Type')}. Body: {response.text[:200]}")
@ -301,7 +363,8 @@ def fetch_post_comments(api_domain, service, user_id, post_id, headers, logger,
except Exception as e:
raise RuntimeError(f"Unexpected error fetching comments for post {post_id} ({comments_api_url}): {e}")
def download_from_api(api_url_input, logger=print, start_page=None, end_page=None, manga_mode=False, cancellation_event=None, pause_event=None):
def download_from_api(api_url_input, logger=print, start_page=None, end_page=None, manga_mode=False,
cancellation_event=None, pause_event=None, use_cookie=False, cookie_text="", selected_cookie_file=None, app_base_dir=None):
headers = {'User-Agent': 'Mozilla/5.0', 'Accept': 'application/json'}
service, user_id, target_post_id = extract_post_info(api_url_input)
@ -326,6 +389,11 @@ def download_from_api(api_url_input, logger=print, start_page=None, end_page=Non
api_domain = "kemono.su"
api_base_url = f"https://{api_domain}/api/v1/{service}/user/{user_id}"
cookies_for_api = None
if use_cookie and app_base_dir: # app_base_dir is needed for cookies.txt path
cookies_for_api = prepare_cookies_for_request(use_cookie, cookie_text, selected_cookie_file, app_base_dir, logger)
page_size = 50
if is_creator_feed_for_manga:
@ -334,7 +402,7 @@ def download_from_api(api_url_input, logger=print, start_page=None, end_page=Non
current_offset_manga = 0
while True:
if pause_event and pause_event.is_set():
logger(" Manga mode post fetching paused...")
logger("   Manga mode post fetching paused...") # type: ignore
while pause_event.is_set():
if cancellation_event and cancellation_event.is_set():
logger(" Manga mode post fetching cancelled while paused.")
@ -345,7 +413,7 @@ def download_from_api(api_url_input, logger=print, start_page=None, end_page=Non
logger(" Manga mode post fetching cancelled.")
break
try:
posts_batch_manga = fetch_posts_paginated(api_base_url, headers, current_offset_manga, logger, cancellation_event, pause_event)
posts_batch_manga = fetch_posts_paginated(api_base_url, headers, current_offset_manga, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
if not isinstance(posts_batch_manga, list):
logger(f"❌ API Error (Manga Mode): Expected list of posts, got {type(posts_batch_manga)}.")
break
@ -413,7 +481,7 @@ def download_from_api(api_url_input, logger=print, start_page=None, end_page=Non
while True:
if pause_event and pause_event.is_set():
logger(" Post fetching loop paused...")
logger(" Post fetching loop paused...") # type: ignore
while pause_event.is_set():
if cancellation_event and cancellation_event.is_set():
logger(" Post fetching loop cancelled while paused.")
@ -432,7 +500,7 @@ def download_from_api(api_url_input, logger=print, start_page=None, end_page=Non
break
try:
posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event)
posts_batch = fetch_posts_paginated(api_base_url, headers, current_offset, logger, cancellation_event, pause_event, cookies_dict=cookies_for_api)
if not isinstance(posts_batch, list):
logger(f"❌ API Error: Expected list of posts, got {type(posts_batch)} at page {current_page_num} (offset {current_offset}).")
break
@ -529,6 +597,10 @@ class PostProcessorWorker:
char_filter_scope=CHAR_SCOPE_FILES,
remove_from_filename_words_list=None,
allow_multipart_download=True,
cookie_text="", # Added missing parameter
use_cookie=False, # Added missing parameter
selected_cookie_file=None, # Added missing parameter
app_base_dir=None, # New parameter for app's base directory
manga_date_file_counter_ref=None, # New parameter for date-based manga naming
):
self.post = post_data
@ -574,7 +646,11 @@ class PostProcessorWorker:
self.remove_from_filename_words_list = remove_from_filename_words_list if remove_from_filename_words_list is not None else []
self.allow_multipart_download = allow_multipart_download
self.manga_date_file_counter_ref = manga_date_file_counter_ref # Store the reference
self.selected_cookie_file = selected_cookie_file # Store selected cookie file path
self.app_base_dir = app_base_dir # Store app base dir
self.cookie_text = cookie_text # Store cookie text
self.use_cookie = use_cookie # Store cookie setting
if self.compress_images and Image is None:
self.logger("⚠️ Image compression disabled: Pillow library not found.")
self.compress_images = False
@ -636,6 +712,9 @@ class PostProcessorWorker:
if self.check_cancel() or (skip_event and skip_event.is_set()): return 0, 1, "", False
file_url = file_info.get('url')
cookies_to_use_for_file = None
if self.use_cookie: # This flag comes from the checkbox
cookies_to_use_for_file = prepare_cookies_for_request(self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger)
api_original_filename = file_info.get('_original_name_for_log', file_info.get('name'))
# This is the ideal name for the file if it were to be saved in the main target_folder_path.
@ -777,7 +856,7 @@ class PostProcessorWorker:
self._emit_signal('file_download_status', True)
response = requests.get(file_url, headers=headers, timeout=(15, 300), stream=True)
response = requests.get(file_url, headers=headers, timeout=(15, 300), stream=True, cookies=cookies_to_use_for_file)
response.raise_for_status()
total_size_bytes = int(response.headers.get('Content-Length', 0))
@ -795,7 +874,7 @@ class PostProcessorWorker:
mp_save_path_base_for_part = os.path.join(target_folder_path, filename_to_save_in_main_path)
mp_success, mp_bytes, mp_hash, mp_file_handle = download_file_in_parts(
file_url, mp_save_path_base_for_part, total_size_bytes, num_parts_for_file, headers, api_original_filename,
emitter_for_multipart=self.emitter, # Pass the worker's emitter
emitter_for_multipart=self.emitter, cookies_for_chunk_session=cookies_to_use_for_file, # Pass cookies
cancellation_event=self.cancellation_event, skip_event=skip_event, logger_func=self.logger,
pause_event=self.pause_event # Pass pause_event
)
@ -1140,7 +1219,10 @@ class PostProcessorWorker:
comments_data = fetch_post_comments(
api_domain_for_comments, self.service, self.user_id, post_id,
headers, self.logger, self.cancellation_event, self.pause_event # Pass pause_event
headers, self.logger, self.cancellation_event, self.pause_event, # Pass pause_event
cookies_dict=prepare_cookies_for_request( # Prepare cookies for this API call
self.use_cookie, self.cookie_text, self.selected_cookie_file, self.app_base_dir, self.logger
)
)
if comments_data:
self.logger(f" Fetched {len(comments_data)} comments for post {post_id}.")
@ -1536,6 +1618,8 @@ class DownloadThread(QThread):
char_filter_scope=CHAR_SCOPE_FILES, # manga_date_file_counter_ref removed from here
remove_from_filename_words_list=None,
allow_multipart_download=True,
selected_cookie_file=None, # New parameter for selected cookie file
app_base_dir=None, # New parameter
manga_date_file_counter_ref=None, # New parameter
):
super().__init__()
@ -1580,6 +1664,10 @@ class DownloadThread(QThread):
self.char_filter_scope = char_filter_scope
self.remove_from_filename_words_list = remove_from_filename_words_list
self.allow_multipart_download = allow_multipart_download
self.selected_cookie_file = selected_cookie_file # Store selected cookie file
self.app_base_dir = app_base_dir # Store app base dir
self.cookie_text = cookie_text # Store cookie text
self.use_cookie = use_cookie # Store cookie setting
self.manga_date_file_counter_ref = manga_date_file_counter_ref # Store for passing to worker by DownloadThread
# self.manga_date_scan_dir = manga_date_scan_dir # Store scan directory
if self.compress_images and Image is None:
@ -1660,8 +1748,12 @@ class DownloadThread(QThread):
start_page=self.start_page,
end_page=self.end_page,
manga_mode=self.manga_mode_active,
cancellation_event=self.cancellation_event,
pause_event=self.pause_event # Pass pause_event
cancellation_event=self.cancellation_event, # type: ignore
pause_event=self.pause_event, # Pass pause_event
use_cookie=self.use_cookie, # Pass cookie settings for API calls
cookie_text=self.cookie_text,
selected_cookie_file=self.selected_cookie_file,
app_base_dir=self.app_base_dir
)
for posts_batch_data in post_generator:
@ -1704,6 +1796,10 @@ class DownloadThread(QThread):
char_filter_scope=self.char_filter_scope,
remove_from_filename_words_list=self.remove_from_filename_words_list,
allow_multipart_download=self.allow_multipart_download,
selected_cookie_file=self.selected_cookie_file, # Pass selected cookie file
app_base_dir=self.app_base_dir, # Pass app_base_dir
cookie_text=self.cookie_text, # Pass cookie text
use_cookie=self.use_cookie, # Pass cookie setting to worker
manga_date_file_counter_ref=current_manga_date_file_counter_ref, # Pass the calculated or passed-in ref
)
try:

233
main.py
View File

@ -103,6 +103,8 @@ STYLE_DATE_BASED = "date_based" # New style for date-based naming
SKIP_WORDS_SCOPE_KEY = "skipWordsScopeV1"
ALLOW_MULTIPART_DOWNLOAD_KEY = "allowMultipartDownloadV1"
USE_COOKIE_KEY = "useCookieV1" # New setting key
COOKIE_TEXT_KEY = "cookieTextV1" # New setting key for cookie text
CHAR_FILTER_SCOPE_KEY = "charFilterScopeV1"
# CHAR_SCOPE_TITLE, CHAR_SCOPE_FILES, CHAR_SCOPE_BOTH, CHAR_SCOPE_COMMENTS are already defined or imported
@ -503,6 +505,7 @@ class DownloaderApp(QWidget):
# sys.executable is the path to the .exe file
app_base_dir = os.path.dirname(sys.executable)
else:
# This is the directory where main.py (and thus potentially cookies.txt) resides
# Application is running as a script
# __file__ is the path to the script file
app_base_dir = os.path.dirname(os.path.abspath(__file__))
@ -518,6 +521,7 @@ class DownloaderApp(QWidget):
self.processed_posts_count = 0
self.download_counter = 0
self.skip_counter = 0
self.selected_cookie_filepath = None # For storing path from browse button
self.retryable_failed_files_info = [] # For storing info about files that failed but can be retried
self.is_paused = False # New state for pause functionality
@ -578,6 +582,11 @@ class DownloaderApp(QWidget):
self.char_filter_scope = self.settings.value(CHAR_FILTER_SCOPE_KEY, CHAR_SCOPE_FILES, type=str) # Default to Files
# Always default multi-part download to OFF on launch, ignoring any saved setting.
self.allow_multipart_download_setting = False
# Ensure "Use Cookie" is unchecked and text is empty by default on every launch
self.use_cookie_setting = False # Always default to False on launch
self.cookie_text_setting = "" # Always default to empty on launch
print(f" Known.txt will be loaded/saved at: {self.config_file}")
@ -597,6 +606,8 @@ class DownloaderApp(QWidget):
self.log_signal.emit(f" Skip words scope loaded: '{self.skip_words_scope}'")
self.log_signal.emit(f" Character filter scope loaded: '{self.char_filter_scope}'")
self.log_signal.emit(f" Multi-part download defaults to: {'Enabled' if self.allow_multipart_download_setting else 'Disabled'} on launch")
self.log_signal.emit(f" Cookie text defaults to: Empty on launch")
self.log_signal.emit(f" 'Use Cookie' setting defaults to: Disabled on launch")
def _connect_signals(self):
@ -610,6 +621,12 @@ class DownloaderApp(QWidget):
if hasattr(self, 'character_input'): # Connect live update for character input
self.character_input.textChanged.connect(self._on_character_input_changed_live)
# Timer for processing the worker queue
if hasattr(self, 'use_cookie_checkbox'):
self.use_cookie_checkbox.toggled.connect(self._update_cookie_input_visibility)
if hasattr(self, 'cookie_browse_button'): # Connect the new browse button
self.cookie_browse_button.clicked.connect(self._browse_cookie_file)
if hasattr(self, 'cookie_text_input'): # Connect text changed for manual clear detection
self.cookie_text_input.textChanged.connect(self._handle_cookie_text_manual_change)
self.gui_update_timer.timeout.connect(self._process_worker_queue)
self.gui_update_timer.start(100) # Check queue every 100ms
@ -749,14 +766,41 @@ class DownloaderApp(QWidget):
content = line[1:-1].strip()
parts = [p.strip() for p in content.split(',') if p.strip()]
if parts:
primary_name = parts[0]
# Aliases include the primary name for matching convenience
unique_aliases = sorted(list(set([primary_name] + parts)))
parsed_known_objects.append({
"name": primary_name,
"is_group": True,
"aliases": unique_aliases
})
potential_primary_name = None
all_aliases_in_line = []
remaining_parts = list(parts) # Create a mutable copy
# First, find and process the bracketed primary name
for i, part_check_brackets in enumerate(parts):
if part_check_brackets.startswith('[') and part_check_brackets.endswith(']'):
potential_primary_name = part_check_brackets[1:-1].strip()
if potential_primary_name:
all_aliases_in_line.append(potential_primary_name)
remaining_parts.pop(i) # Remove the processed primary name part
break # Found the bracketed name
# If no bracketed name was found, use the first part as primary (fallback)
if not potential_primary_name and parts:
potential_primary_name = parts[0].strip()
all_aliases_in_line.append(potential_primary_name)
if remaining_parts and remaining_parts[0] == potential_primary_name: # Avoid double-adding if it was the first
remaining_parts.pop(0)
# Add remaining parts as unique aliases
for part in remaining_parts:
cleaned_part = part.strip()
if cleaned_part and cleaned_part not in all_aliases_in_line:
all_aliases_in_line.append(cleaned_part)
if not potential_primary_name: # Should not happen if parts is not empty
if hasattr(self, 'log_signal'): self.log_signal.emit(f"⚠️ Could not determine primary name in Known.txt on line {line_num}: '{line}'")
continue
else: # This is the 'else' statement from the error
parsed_known_objects.append({ # This block needs to be indented
"name": potential_primary_name,
"is_group": True,
"aliases": all_aliases_in_line # Already unique and primary is first
})
else:
if hasattr(self, 'log_signal'): self.log_signal.emit(f"⚠️ Empty group found in Known.txt on line {line_num}: '{line}'")
else:
@ -787,14 +831,14 @@ class DownloaderApp(QWidget):
# Add default entries if the list is empty after loading (meaning file didn't exist)
if not KNOWN_NAMES:
default_entry = {
"name": "Boa Hancock",
"name": "Yor",
"is_group": True,
"aliases": sorted(list(set(["Boa Hancock", "Boa", "Hancock", "Snakequeen"]))) # Ensure unique and sorted aliases
"aliases": sorted(list(set(["Yor Forger", "Yor", "Yor Briar"]))) # Ensure unique and sorted aliases
}
KNOWN_NAMES.append(default_entry)
# Add more defaults here if needed
self.save_known_names() # Save to disk immediately if file was created with defaults
self.log_signal.emit(" Added default entry for 'Boa Hancock'.")
self.log_signal.emit(" Added default entry for 'Yor Forger'.")
self.character_list.addItems([entry["name"] for entry in KNOWN_NAMES])
@ -805,11 +849,15 @@ class DownloaderApp(QWidget):
with open(self.config_file, 'w', encoding='utf-8') as f:
for entry in KNOWN_NAMES:
if entry["is_group"] and len(entry["aliases"]) > 1:
# Join all aliases with ", " for readability
joined_aliases = ", ".join(entry["aliases"])
f.write(f"({joined_aliases})\n")
# New format: Mark primary name (first alias) with brackets
primary_name = entry['aliases'][0]
other_aliases = entry['aliases'][1:]
formatted_aliases_for_file = [f"[{primary_name}]"] + other_aliases
f.write(f"({', '.join(formatted_aliases_for_file)})\n")
elif entry["is_group"] and len(entry["aliases"]) == 1: # Group with only one name (the primary)
f.write(f"([{entry['aliases'][0]}])\n") # Save as ([PrimaryName])
else: # Simple entry or group with only one alias (the name itself)
f.write(entry["name"] + '\n')
f.write(entry["name"] + '\n') # Non-grouped items are saved as plain names
if hasattr(self, 'log_signal'): self.log_signal.emit(f"💾 Saved {len(KNOWN_NAMES)} known entries to {self.config_file}")
except Exception as e:
log_msg = f"❌ Error saving config '{self.config_file}': {e}"
@ -822,6 +870,8 @@ class DownloaderApp(QWidget):
self.settings.setValue(SKIP_WORDS_SCOPE_KEY, self.skip_words_scope)
self.settings.setValue(CHAR_FILTER_SCOPE_KEY, self.char_filter_scope)
self.settings.setValue(ALLOW_MULTIPART_DOWNLOAD_KEY, self.allow_multipart_download_setting)
self.settings.setValue(COOKIE_TEXT_KEY, self.cookie_text_input.text() if hasattr(self, 'cookie_text_input') else "")
self.settings.setValue(USE_COOKIE_KEY, self.use_cookie_checkbox.isChecked() if hasattr(self, 'use_cookie_checkbox') else False)
self.settings.sync()
should_exit = True
@ -1113,6 +1163,30 @@ class DownloaderApp(QWidget):
)
self.use_subfolder_per_post_checkbox.toggled.connect(self.update_ui_for_subfolders)
advanced_row1_layout.addWidget(self.use_subfolder_per_post_checkbox)
self.use_cookie_checkbox = QCheckBox("Use Cookie")
self.use_cookie_checkbox.setToolTip("If checked, will attempt to use cookies from 'cookies.txt' (Netscape format)\n"
"in the application directory for requests.\n"
"Useful for accessing content that requires login on Kemono/Coomer.")
self.use_cookie_checkbox.setChecked(self.use_cookie_setting) # Set from loaded setting
self.cookie_text_input = QLineEdit()
self.cookie_text_input.setPlaceholderText("if no Select cookies.txt)")
self.cookie_text_input.setMinimumHeight(28) # Slightly increase height for better visibility
self.cookie_text_input.setToolTip("Enter your cookie string directly.\n"
"This will be used if 'Use Cookie' is checked AND 'cookies.txt' is not found or this field is not empty.\n"
"The format depends on how the backend will parse it (e.g., 'name1=value1; name2=value2').")
self.cookie_text_input.setText(self.cookie_text_setting) # Set from loaded setting
advanced_row1_layout.addWidget(self.use_cookie_checkbox)
advanced_row1_layout.addWidget(self.cookie_text_input, 2) # Stretch factor 2
self.cookie_browse_button = QPushButton("Browse...")
self.cookie_browse_button.setToolTip("Browse for a cookie file (Netscape format, typically cookies.txt).\nThis will be used if 'Use Cookie' is checked and the text field above is empty.")
self.cookie_browse_button.setFixedWidth(80) # Make it a bit compact
self.cookie_browse_button.setStyleSheet("padding: 4px 8px;")
advanced_row1_layout.addWidget(self.cookie_browse_button)
advanced_row1_layout.addStretch(1)
checkboxes_group_layout.addLayout(advanced_row1_layout)
@ -1354,6 +1428,7 @@ class DownloaderApp(QWidget):
self.update_ui_for_manga_mode(self.manga_mode_checkbox.isChecked())
if hasattr(self, 'link_input'): self.link_input.textChanged.connect(lambda: self.update_ui_for_manga_mode(self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False)) # Also trigger manga UI update
self.load_known_names_from_util()
self._update_cookie_input_visibility(self.use_cookie_checkbox.isChecked() if hasattr(self, 'use_cookie_checkbox') else False) # Initial visibility
self._handle_multithreading_toggle(self.use_multithreading_checkbox.isChecked())
if hasattr(self, 'radio_group') and self.radio_group.checkedButton():
self._handle_filter_mode_change(self.radio_group.checkedButton(), True)
@ -1362,9 +1437,28 @@ class DownloaderApp(QWidget):
self._update_char_filter_scope_button_text()
self._update_multithreading_for_date_mode() # Ensure correct initial state
def _browse_cookie_file(self):
"""Opens a file dialog to select a cookie file."""
# Start in the user's documents directory or current app dir if not available
start_dir = QStandardPaths.writableLocation(QStandardPaths.DocumentsLocation)
if not start_dir:
start_dir = os.path.dirname(self.config_file) # App directory
filepath, _ = QFileDialog.getOpenFileName(self, "Select Cookie File", start_dir, "Text files (*.txt);;All files (*)")
if filepath:
self.selected_cookie_filepath = filepath
self.log_signal.emit(f" Selected cookie file: {filepath}")
if hasattr(self, 'cookie_text_input'):
# Block signals temporarily to prevent textChanged handler from misinterpreting this programmatic change
self.cookie_text_input.blockSignals(True)
self.cookie_text_input.setText(filepath)
self.cookie_text_input.setReadOnly(True)
self.cookie_text_input.setPlaceholderText("") # No placeholder when showing a path
self.cookie_text_input.blockSignals(False)
def _center_on_screen(self):
"""Centers the widget on the screen."""
# Updated to use availableGeometry and center more reliably
# Updated to use availableGeometry and center more reliably
try:
primary_screen = QApplication.primaryScreen()
if not primary_screen:
@ -1381,6 +1475,19 @@ class DownloaderApp(QWidget):
except Exception as e:
self.log_signal.emit(f"⚠️ Error centering window: {e}")
def _handle_cookie_text_manual_change(self, text):
"""Handles manual changes to the cookie text input, especially clearing a browsed path."""
if not hasattr(self, 'cookie_text_input') or not hasattr(self, 'use_cookie_checkbox'):
return
# If a file was selected AND the text field is now empty (user deleted the path)
if self.selected_cookie_filepath and not text.strip() and self.use_cookie_checkbox.isChecked():
self.selected_cookie_filepath = None
self.cookie_text_input.setReadOnly(False)
self.cookie_text_input.setPlaceholderText("Cookie string (if no cookies.txt)")
self.log_signal.emit(" Browsed cookie file path cleared from input. Switched to manual cookie string mode.")
def get_dark_theme(self):
return """
QWidget { background-color: #2E2E2E; color: #E0E0E0; font-family: Segoe UI, Arial, sans-serif; font-size: 10pt; }
@ -2105,6 +2212,10 @@ class DownloaderApp(QWidget):
if self.use_subfolder_per_post_checkbox:
self.use_subfolder_per_post_checkbox.setEnabled(not is_only_links and not is_only_archives)
if hasattr(self, 'use_cookie_checkbox'):
self.use_cookie_checkbox.setEnabled(not is_only_links) # Cookies might be relevant for archives
enable_character_filter_related_widgets = checked and not is_only_links and not is_only_archives
@ -2119,6 +2230,38 @@ class DownloaderApp(QWidget):
self.update_custom_folder_visibility()
def _update_cookie_input_visibility(self, checked):
cookie_text_input_exists = hasattr(self, 'cookie_text_input')
cookie_browse_button_exists = hasattr(self, 'cookie_browse_button')
if cookie_text_input_exists or cookie_browse_button_exists:
is_only_links = self.radio_only_links and self.radio_only_links.isChecked()
# Cookie text input and browse button are visible if "Use Cookie" is checked
if cookie_text_input_exists: self.cookie_text_input.setVisible(checked)
if cookie_browse_button_exists: self.cookie_browse_button.setVisible(checked)
can_enable_cookie_text = checked and not is_only_links
enable_state_for_fields = can_enable_cookie_text and (self.download_btn.isEnabled() or self.is_paused)
if cookie_text_input_exists:
# Text input is always enabled if its parent "Use Cookie" is checked and conditions met,
# unless a file path is displayed (then it's read-only).
self.cookie_text_input.setEnabled(enable_state_for_fields)
if self.selected_cookie_filepath and checked: # If a file is selected and "Use Cookie" is on
self.cookie_text_input.setText(self.selected_cookie_filepath)
self.cookie_text_input.setReadOnly(True)
self.cookie_text_input.setPlaceholderText("")
elif checked: # "Use Cookie" is on, but no file selected
self.cookie_text_input.setReadOnly(False)
self.cookie_text_input.setPlaceholderText("Cookie string (if no cookies.txt)")
if cookie_browse_button_exists: self.cookie_browse_button.setEnabled(enable_state_for_fields)
if not checked: # If "Use Cookie" is unchecked, clear the selected file path
self.selected_cookie_filepath = None
def update_page_range_enabled_state(self):
url_text = self.link_input.text().strip() if self.link_input else ""
_, _, post_id = extract_post_info(url_text)
@ -2339,6 +2482,7 @@ class DownloaderApp(QWidget):
use_multithreading_enabled_by_checkbox = self.use_multithreading_checkbox.isChecked()
try:
# num_threads_from_gui is used for post workers or file workers depending on context
num_threads_from_gui = int(self.thread_count_input.text().strip())
if num_threads_from_gui < 1: num_threads_from_gui = 1
except ValueError:
@ -2395,6 +2539,10 @@ class DownloaderApp(QWidget):
raw_remove_filename_words = self.remove_from_filename_input.text().strip() if hasattr(self, 'remove_from_filename_input') else ""
allow_multipart = self.allow_multipart_download_setting # Use the internal setting
remove_from_filename_words_list = [word.strip() for word in raw_remove_filename_words.split(',') if word.strip()]
use_cookie_from_checkbox = self.use_cookie_checkbox.isChecked() if hasattr(self, 'use_cookie_checkbox') else False
app_base_dir_for_cookies = os.path.dirname(self.config_file) # Directory of Known.txt
cookie_text_from_input = self.cookie_text_input.text().strip() if hasattr(self, 'cookie_text_input') and use_cookie_from_checkbox else ""
selected_cookie_file_path_for_backend = self.selected_cookie_filepath if use_cookie_from_checkbox and self.selected_cookie_filepath else None
current_skip_words_scope = self.get_skip_words_scope()
current_char_filter_scope = self.get_char_filter_scope()
manga_mode_is_checked = self.manga_mode_checkbox.isChecked() if self.manga_mode_checkbox else False
@ -2403,6 +2551,10 @@ class DownloaderApp(QWidget):
backend_filter_mode = self.get_filter_mode()
user_selected_filter_text = self.radio_group.checkedButton().text() if self.radio_group.checkedButton() else "All"
# If a file path is selected, cookie_text_from_input should be considered empty for backend logic
if selected_cookie_file_path_for_backend:
cookie_text_from_input = ""
if backend_filter_mode == 'archive':
effective_skip_zip = False
effective_skip_rar = False
@ -2646,6 +2798,7 @@ class DownloaderApp(QWidget):
f" Thumbnails Only: {'Enabled' if download_thumbnails else 'Disabled'}" # Removed duplicate file handling log
])
else:
# If only_links, cookie might still be relevant for accessing the page
log_messages.append(f" Mode: Extracting Links Only")
log_messages.append(f" Show External Links: {'Enabled' if self.show_external_links and not extract_links_only and backend_filter_mode != 'archive' else 'Disabled'}")
@ -2657,6 +2810,11 @@ class DownloaderApp(QWidget):
log_messages.append(f" ↳ Manga Character Filter (for naming/folder): {', '.join(item['name'] for item in filter_character_list_to_pass)}")
log_messages.append(f" ↳ Manga Duplicates: Will be renamed with numeric suffix if names clash (e.g., _1, _2).")
log_messages.append(f" Use Cookie ('cookies.txt'): {'Enabled' if use_cookie_from_checkbox else 'Disabled'}")
if use_cookie_from_checkbox and cookie_text_from_input:
log_messages.append(f" ↳ Cookie Text Provided: Yes (length: {len(cookie_text_from_input)})")
elif use_cookie_from_checkbox and selected_cookie_file_path_for_backend:
log_messages.append(f" ↳ Cookie File Selected: {os.path.basename(selected_cookie_file_path_for_backend)}")
should_use_multithreading_for_posts = use_multithreading_enabled_by_checkbox and not post_id_from_url
# Adjust log message if date-based manga mode forced single thread
if manga_mode and self.manga_filename_style == STYLE_DATE_BASED and not post_id_from_url:
@ -2713,6 +2871,10 @@ class DownloaderApp(QWidget):
'num_file_threads_for_worker': effective_num_file_threads_per_worker,
'manga_date_file_counter_ref': manga_date_file_counter_ref_for_thread,
'allow_multipart_download': allow_multipart,
'cookie_text': cookie_text_from_input, # Pass cookie text
'selected_cookie_file': selected_cookie_file_path_for_backend, # Pass selected cookie file
'app_base_dir': app_base_dir_for_cookies, # Pass app base dir
'use_cookie': use_cookie_from_checkbox, # Pass cookie setting
# 'duplicate_file_mode' and session-wide tracking removed
}
@ -2736,7 +2898,7 @@ class DownloaderApp(QWidget):
'start_page', 'end_page', 'target_post_id_from_initial_url',
'manga_date_file_counter_ref', # Ensure this is passed for single thread mode
'manga_mode_active', 'unwanted_keywords', 'manga_filename_style',
'allow_multipart_download'
'allow_multipart_download', 'use_cookie', 'cookie_text', 'app_base_dir', 'selected_cookie_file' # Added selected_cookie_file
]
args_template['skip_current_file_flag'] = None
single_thread_args = {key: args_template[key] for key in dt_expected_keys if key in args_template}
@ -2938,7 +3100,7 @@ class DownloaderApp(QWidget):
'cancellation_event', 'downloaded_files', 'downloaded_file_hashes',
'downloaded_files_lock', 'downloaded_file_hashes_lock', 'remove_from_filename_words_list', 'dynamic_character_filter_holder', # Added holder
'skip_words_list', 'skip_words_scope', 'char_filter_scope',
'show_external_links', 'extract_links_only', 'allow_multipart_download',
'show_external_links', 'extract_links_only', 'allow_multipart_download', 'use_cookie', 'cookie_text', 'app_base_dir', 'selected_cookie_file', # Added selected_cookie_file
'num_file_threads', 'skip_current_file_flag', 'manga_date_file_counter_ref',
'manga_mode_active', 'manga_filename_style'
]
@ -2947,7 +3109,7 @@ class DownloaderApp(QWidget):
'skip_words_list', 'skip_words_scope', 'char_filter_scope', 'remove_from_filename_words_list',
'show_external_links', 'extract_links_only', 'duplicate_file_mode', # Added duplicate_file_mode here
'num_file_threads', 'skip_current_file_flag', 'manga_mode_active', 'manga_filename_style',
'manga_date_file_counter_ref' # Add this
'manga_date_file_counter_ref', 'use_cookie', 'cookie_text', 'app_base_dir', 'selected_cookie_file' # Added selected_cookie_file
}
# --- Batching Logic ---
@ -3066,7 +3228,10 @@ class DownloaderApp(QWidget):
self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
self.manga_mode_checkbox,
self.manga_rename_toggle_button, # Visibility handled by update_ui_for_manga_mode
self.cookie_browse_button, # Add cookie browse button
self.multipart_toggle_button,
self.cookie_text_input, # Add cookie text input
self.use_cookie_checkbox, # Add cookie checkbox here
self.external_links_checkbox
]
@ -3082,7 +3247,7 @@ class DownloaderApp(QWidget):
self.skip_zip_checkbox, self.skip_rar_checkbox, self.download_thumbnails_checkbox, self.compress_images_checkbox,
self.use_subfolders_checkbox, self.use_subfolder_per_post_checkbox,
self.use_multithreading_checkbox, self.thread_count_input, self.thread_count_label,
self.external_links_checkbox, self.manga_mode_checkbox, self.manga_rename_toggle_button,
self.external_links_checkbox, self.manga_mode_checkbox, self.manga_rename_toggle_button, self.use_cookie_checkbox, self.cookie_text_input, self.cookie_browse_button,
self.multipart_toggle_button,
self.character_search_input, self.new_char_input, self.add_char_button, self.delete_char_button,
self.reset_button
@ -3104,10 +3269,19 @@ class DownloaderApp(QWidget):
if self.external_links_checkbox:
is_only_links = self.radio_only_links and self.radio_only_links.isChecked()
self.external_links_checkbox.setEnabled(enabled and not is_only_links)
if self.is_paused and not is_only_links: # Also re-enable if paused and not in link mode
is_only_archives = self.radio_only_archives and self.radio_only_archives.isChecked()
can_enable_ext_links = enabled and not is_only_links and not is_only_archives
self.external_links_checkbox.setEnabled(can_enable_ext_links)
if self.is_paused and not is_only_links and not is_only_archives:
self.external_links_checkbox.setEnabled(True)
# Handle "Use Cookie" checkbox and text input
if hasattr(self, 'use_cookie_checkbox'):
self.use_cookie_checkbox.setEnabled(enabled or self.is_paused)
self._update_cookie_input_visibility(self.use_cookie_checkbox.isChecked()) # This will handle cookie_text_input's enabled state
if hasattr(self, 'use_cookie_checkbox'): self.use_cookie_checkbox.setEnabled(enabled or self.is_paused)
if self.log_verbosity_toggle_button: self.log_verbosity_toggle_button.setEnabled(True) # New button, always enabled
multithreading_currently_on = self.use_multithreading_checkbox.isChecked()
@ -3176,6 +3350,13 @@ class DownloaderApp(QWidget):
self.use_subfolder_per_post_checkbox.setChecked(False); self.use_multithreading_checkbox.setChecked(True);
self.external_links_checkbox.setChecked(False)
if self.manga_mode_checkbox: self.manga_mode_checkbox.setChecked(False)
if hasattr(self, 'use_cookie_checkbox'): self.use_cookie_checkbox.setChecked(self.use_cookie_setting) # Reset to loaded or False
# For soft reset, if a cookie file was selected, keep it displayed if "Use Cookie" remains checked.
# Otherwise, clear it. The _update_cookie_input_visibility will handle the display.
if not (hasattr(self, 'use_cookie_checkbox') and self.use_cookie_checkbox.isChecked()):
self.selected_cookie_filepath = None
if hasattr(self, 'cookie_text_input'): self.cookie_text_input.setText(self.cookie_text_setting if self.use_cookie_setting else "") # Reset to loaded or empty
# 2. Reset internal state for UI-managed settings to app defaults (not from QSettings)
self.allow_multipart_download_setting = False # Default to OFF
@ -3215,6 +3396,7 @@ class DownloaderApp(QWidget):
# Explicitly call these to ensure they reflect changes from preserved inputs
self.update_custom_folder_visibility(self.link_input.text())
self.update_page_range_enabled_state()
self._update_cookie_input_visibility(self.use_cookie_checkbox.isChecked() if hasattr(self, 'use_cookie_checkbox') else False)
# update_ui_for_manga_mode is called within set_ui_enabled
self.log_signal.emit("✅ Soft UI reset complete. Preserved URL and Directory (if provided).")
@ -3547,6 +3729,12 @@ class DownloaderApp(QWidget):
self.use_subfolder_per_post_checkbox.setChecked(False); self.use_multithreading_checkbox.setChecked(True);
self.external_links_checkbox.setChecked(False)
if self.manga_mode_checkbox: self.manga_mode_checkbox.setChecked(False)
if hasattr(self, 'use_cookie_checkbox'): self.use_cookie_checkbox.setChecked(False) # Default to False on full reset
# On full reset, always clear the selected cookie file path
self.selected_cookie_filepath = None
if hasattr(self, 'cookie_text_input'): self.cookie_text_input.clear() # Clear cookie text on full reset
# Reset old summarization state (if any remnants) and new bold list state
self.missed_title_key_terms_count.clear()
@ -3567,6 +3755,7 @@ class DownloaderApp(QWidget):
self._update_char_filter_scope_button_text()
self.current_log_view = 'progress' # Reset to progress log view
self._update_cookie_input_visibility(False) # Hide cookie text input on full reset
if self.log_view_stack: self.log_view_stack.setCurrentIndex(0)
if self.progress_log_label: self.progress_log_label.setText("📜 Progress Log:")

View File

@ -13,7 +13,7 @@ DOWNLOAD_CHUNK_SIZE_ITER = 1024 * 256 # 256KB for iter_content within a chunk d
def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte, headers,
part_num, total_parts, progress_data, cancellation_event, skip_event, pause_event, global_emit_time_ref, # Added global_emit_time_ref
part_num, total_parts, progress_data, cancellation_event, skip_event, pause_event, global_emit_time_ref, cookies_for_chunk, # Added cookies_for_chunk
logger_func, emitter=None, api_original_filename=None): # Renamed logger, signals to emitter
"""Downloads a single chunk of a file and writes it to the temp file."""
if cancellation_event and cancellation_event.is_set():
@ -78,8 +78,8 @@ def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte,
# Enhanced log message for chunk start
log_msg = f" 🚀 [Chunk {part_num + 1}/{total_parts}] Starting download: bytes {start_byte}-{end_byte if end_byte != -1 else 'EOF'}"
logger_func(log_msg)
print(f"DEBUG_MULTIPART: {log_msg}") # Direct console print for debugging
response = requests.get(chunk_url, headers=chunk_headers, timeout=(10, 120), stream=True)
# print(f"DEBUG_MULTIPART: {log_msg}") # Direct console print for debugging
response = requests.get(chunk_url, headers=chunk_headers, timeout=(10, 120), stream=True, cookies=cookies_for_chunk)
response.raise_for_status()
# For 0-byte files, if end_byte was -1, we expect 0 content.
@ -159,7 +159,8 @@ def _download_individual_chunk(chunk_url, temp_file_path, start_byte, end_byte,
def download_file_in_parts(file_url, save_path, total_size, num_parts, headers, api_original_filename,
emitter_for_multipart, cancellation_event, skip_event, logger_func, pause_event): # Added pause_event
emitter_for_multipart, cookies_for_chunk_session, # Added cookies_for_chunk_session
cancellation_event, skip_event, logger_func, pause_event):
"""
Downloads a file in multiple parts concurrently.
Returns: (download_successful_flag, downloaded_bytes, calculated_file_hash, temp_file_handle_or_None)
@ -221,7 +222,7 @@ def download_file_in_parts(file_url, save_path, total_size, num_parts, headers,
_download_individual_chunk, chunk_url=file_url, temp_file_path=temp_file_path,
start_byte=start, end_byte=end, headers=headers, part_num=i, total_parts=num_parts,
progress_data=progress_data, cancellation_event=cancellation_event, skip_event=skip_event, global_emit_time_ref=progress_data['last_global_emit_time'],
pause_event=pause_event, logger_func=logger_func, emitter=emitter_for_multipart, # Pass pause_event and emitter
pause_event=pause_event, cookies_for_chunk=cookies_for_chunk_session, logger_func=logger_func, emitter=emitter_for_multipart,
api_original_filename=api_original_filename
))

View File

@ -1,4 +1,4 @@
<h1 align="center">Kemono Downloader v3.4.0</h1>
<h1 align="center">Kemono Downloader v3.5.0</h1>
<div align="center">
<img src="https://github.com/Yuvi9587/Kemono-Downloader/blob/main/Read.png" alt="Kemono Downloader"/>
@ -11,8 +11,31 @@ Built with **PyQt5**, this tool is ideal for users who want deep filtering, cust
---
## ✨ What's New in v3.4.0?
## ✨ What's New in v3.5.0?
Version 3.5.0 focuses on enhancing access to content and providing even smarter organization:
### 🍪 Enhanced Cookie Management
- **Access Restricted Content:** Use your browser's cookies to download from Kemono/Coomer seamlessly, as if you were logged in.
- **Flexible Input:**
- Directly paste your cookie string (e.g., `name1=value1; name2=value2`).
- Browse and load cookies from a `cookies.txt` file (Netscape format).
- Automatic fallback to a `cookies.txt` file in the application directory if "Use Cookie" is enabled and no other source is specified.
- **Easy Activation:** A simple "Use Cookie" checkbox in the UI controls this feature.
- *Important Note: Cookie settings (text, file path, and enabled state) are configured per session and are not saved when the application is closed. You will need to re-apply them on each launch if needed.*
---
### 🗂️ Advanced `Known.txt` for Smart Folder Organization
- **Fine-Grained Control:** Take your automatic folder organization to the next level with a personalized list of names, series titles, and keywords in `Known.txt`.
- **Primary Names & Aliases:** Define a main folder name and link multiple aliases to it. For example, `([Power], powwr, pwr, Blood devil)` ensures any post matching "Power" or "powwr" (in title or filename, depending on settings) gets saved into a "Power" folder. Simple entries like `My Series` are also supported.
- **Intelligent Fallback:** When "Separate Folders by Name/Title" is active and a post doesn't match any specific "Filter by Character(s)" input, the downloader consults `Known.txt` to find a matching primary name for folder creation.
- **User-Friendly Management:** Add or remove primary names directly through the UI, or click "Open Known.txt" for advanced editing (e.g., setting up aliases).
---
## ✨ What's in v3.4.0? (Previous Update)
This version brings significant enhancements to manga/comic downloading, filtering capabilities, and user experience:
### 📖 Enhanced Manga/Comic Mode
@ -185,9 +208,12 @@ This version brings significant enhancements to manga/comic downloading, filteri
### 🗃️ Config System
- **Known.txt:**
- Stores names for smart folder suggestions
- Supports aliases via `(alias1, alias2)`
- **`Known.txt` for Smart Folder Naming:**
- A user-editable file (`Known.txt`) stores a list of preferred names, series titles, or keywords.
- Used as a fallback for folder creation when "Separate Folders by Name/Title" is enabled, helping to group content logically even without explicit character filters.
- **Supports primary names and aliases:**
- Simple entries: `My Favorite Series`
- Grouped entries with a primary name for the folder: `([Primary Name], alias1, alias2)`
- **Stored in Standard App Data Path**
@ -234,7 +260,7 @@ pyinstaller --name "Kemono Downloader" --onefile --windowed --icon="Kemono.ico"
## **🗂 Config Files**
- `Known.txt` — character/show names used for folder organization
- Supports grouped names in format: `(Name1, Name2)`
- Supports simple names (e.g., `My Series`) and grouped names with a primary folder name and aliases (e.g., `([Primary Folder Name], alias1, alias2)`).
***