mirror of https://github.com/Yuvi9587/Kemono-Downloader.git
synced 2025-12-17 23:47:15 +00:00

commit f7c4d892a8
parent 661b97aa16

src/core/discord_client.py | 72 (new file)
@@ -0,0 +1,72 @@
import time
import requests
import json
from urllib.parse import urlparse


def fetch_server_channels(server_id, logger, cookies=None):
    """
    Fetches the list of channels for a given Discord server ID from the Kemono API.
    """
    domains_to_try = ["kemono.cr", "kemono.su"]
    for domain in domains_to_try:
        lookup_url = f"https://{domain}/api/v1/discord/channel/lookup/{server_id}"
        logger(f" Attempting to fetch channel list from: {lookup_url}")
        try:
            response = requests.get(lookup_url, cookies=cookies, timeout=15)
            response.raise_for_status()
            channels = response.json()
            if isinstance(channels, list):
                logger(f" ✅ Found {len(channels)} channels for server {server_id}.")
                return channels
        except (requests.exceptions.RequestException, json.JSONDecodeError):
            # Silent failure: fall through and try the next domain.
            pass

    logger(f" ❌ Failed to fetch channel list for server {server_id} from all available domains.")
    return None


def fetch_channel_messages(channel_id, logger, cancellation_event, pause_event, cookies=None):
    """
    Fetches all messages from a Discord channel by looping through API pages (pagination).
    Uses a page size of 150 and advances the offset accordingly.
    """
    offset = 0
    page_size = 150  # The API serves at most 150 messages per page
    api_base_url = f"https://kemono.cr/api/v1/discord/channel/{channel_id}"

    while not (cancellation_event and cancellation_event.is_set()):
        if pause_event and pause_event.is_set():
            logger(" Message fetching paused...")
            while pause_event.is_set():
                if cancellation_event and cancellation_event.is_set(): break
                time.sleep(0.5)
            logger(" Message fetching resumed.")

        if cancellation_event and cancellation_event.is_set():
            break

        paginated_url = f"{api_base_url}?o={offset}"
        logger(f" Fetching messages from API: page starting at offset {offset}")

        try:
            response = requests.get(paginated_url, cookies=cookies, timeout=20)
            response.raise_for_status()
            messages_batch = response.json()

            if not messages_batch:
                logger(f" ✅ Reached end of messages for channel {channel_id}.")
                break

            logger(f" Fetched {len(messages_batch)} messages...")
            yield messages_batch

            if len(messages_batch) < page_size:
                logger(f" ✅ Last page of messages received for channel {channel_id}.")
                break

            offset += page_size
            time.sleep(0.5)

        except (requests.exceptions.RequestException, json.JSONDecodeError) as e:
            logger(f" ❌ Error fetching messages at offset {offset}: {e}")
            break
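A minimal sketch of how a caller can consume this generator (illustrative only: `print` as the logger, fresh threading.Event objects, the channel ID borrowed from the example URL later in this commit, and the import path assumed from the file's location):

import threading
from src.core.discord_client import fetch_channel_messages

cancel_evt, pause_evt = threading.Event(), threading.Event()
all_messages = []
# Each iteration yields one page of up to 150 messages.
for batch in fetch_channel_messages("1252332668805189723", print, cancel_evt, pause_evt):
    all_messages.extend(batch)
print(f"Collected {len(all_messages)} messages")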
@@ -826,37 +826,60 @@ class PostProcessorWorker:
            return 0, 1, filename_to_save_in_main_path, was_original_name_kept_flag, FILE_DOWNLOAD_STATUS_FAILED_RETRYABLE_LATER, details_for_failure

    def process(self):
        # --- START: REFACTORED PROCESS METHOD ---

        # 1. DATA MAPPING: Map Discord Message or Creator Post fields to a consistent set of variables.
        if self.service == 'discord':
            # For Discord, self.post is a MESSAGE object from the API.
            post_title = self.post.get('content', '') or f"Message {self.post.get('id', 'N/A')}"
            post_id = self.post.get('id', 'unknown_id')
            post_main_file_info = {}  # Discord messages don't have a single main file
            post_attachments = self.post.get('attachments', [])
            post_content_html = self.post.get('content', '')
            post_data = self.post  # Keep a reference to the original message object
            log_prefix = "Message"
        else:
            # Existing logic for standard creator posts
            post_title = self.post.get('title', '') or 'untitled_post'
            post_id = self.post.get('id', 'unknown_id')
            post_main_file_info = self.post.get('file')
            post_attachments = self.post.get('attachments', [])
            post_content_html = self.post.get('content', '')
            post_data = self.post  # Reference to the post object
            log_prefix = "Post"

        # 2. SHARED PROCESSING LOGIC: The rest of the function now uses the consistent variables from above.
        result_tuple = (0, 0, [], [], [], None, None)
        total_downloaded_this_post = 0
        total_skipped_this_post = 0
        determined_post_save_path_for_history = self.override_output_dir if self.override_output_dir else self.download_root

        try:
-           if self._check_pause(f"Post processing for ID {self.post.get('id', 'N/A')}"):
-               result_tuple = (0, 0, [], [], [], None, None)
-               return result_tuple
            if self._check_pause(f"{log_prefix} processing for ID {post_id}"):
                return (0, 0, [], [], [], None, None)
            if self.check_cancel():
-               result_tuple = (0, 0, [], [], [], None, None)
-               return result_tuple
                return (0, 0, [], [], [], None, None)

            current_character_filters = self._get_current_character_filters()
            kept_original_filenames_for_log = []
            retryable_failures_this_post = []
            permanent_failures_this_post = []
            total_downloaded_this_post = 0
            total_skipped_this_post = 0

            history_data_for_this_post = None

            parsed_api_url = urlparse(self.api_url_input)
-           post_data = self.post
-           post_id = post_data.get('id', 'unknown_id')

            # CONTEXT-AWARE URL for Referer Header
            if self.service == 'discord':
                server_id = self.user_id
                channel_id = self.post.get('channel', 'unknown_channel')
                post_page_url = f"https://{parsed_api_url.netloc}/discord/server/{server_id}/{channel_id}"
            else:
                post_page_url = f"https://{parsed_api_url.netloc}/{self.service}/user/{self.user_id}/post/{post_id}"

-           post_page_url = f"https://{parsed_api_url.netloc}/{self.service}/user/{self.user_id}/post/{post_id}"
            headers = {'User-Agent': 'Mozilla/5.0', 'Referer': post_page_url, 'Accept': '*/*'}
            link_pattern = re.compile(r"""<a\s+.*?href=["'](https?://[^"']+)["'][^>]*>(.*?)</a>""", re.IGNORECASE | re.DOTALL)
-           post_data = self.post
-           post_title = post_data.get('title', '') or 'untitled_post'
-           post_id = post_data.get('id', 'unknown_id')
-           post_main_file_info = post_data.get('file')
-           post_attachments = post_data.get('attachments', [])


            effective_unwanted_keywords_for_folder_naming = self.unwanted_keywords.copy()
            is_full_creator_download_no_char_filter = not self.target_post_id_from_initial_url and not current_character_filters

@@ -874,9 +897,9 @@ class PostProcessorWorker:
                self.logger(f" Applying creator download specific folder ignore words ({len(self.creator_download_folder_ignore_words)} words).")
                effective_unwanted_keywords_for_folder_naming.update(self.creator_download_folder_ignore_words)

            post_content_html = post_data.get('content', '')
            if not self.extract_links_only:
-               self.logger(f"\n--- Processing Post {post_id} ('{post_title[:50]}...') (Thread: {threading.current_thread().name}) ---")
                self.logger(f"\n--- Processing {log_prefix} {post_id} ('{post_title[:50]}...') (Thread: {threading.current_thread().name}) ---")

            num_potential_files_in_post = len(post_attachments or []) + (1 if post_main_file_info and post_main_file_info.get('path') else 0)

            post_is_candidate_by_title_char_match = False
@@ -920,7 +943,7 @@ class PostProcessorWorker:
                    if original_api_att_name:
                        all_files_from_post_api_for_char_check.append({'_original_name_for_log': original_api_att_name})

-           if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS:
            if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS and self.service != 'discord':
                self.logger(f" [Char Scope: Comments] Phase 1: Checking post files for matches before comments for post ID '{post_id}'.")
                if self._check_pause(f"File check (comments scope) for post {post_id}"):
                    result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
@@ -943,7 +966,7 @@ class PostProcessorWorker:
                        if post_is_candidate_by_file_char_match_in_comment_scope: break
                self.logger(f" [Char Scope: Comments] Phase 1 Result: post_is_candidate_by_file_char_match_in_comment_scope = {post_is_candidate_by_file_char_match_in_comment_scope}")

-           if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS:
            if current_character_filters and self.char_filter_scope == CHAR_SCOPE_COMMENTS and self.service != 'discord':
                if not post_is_candidate_by_file_char_match_in_comment_scope:
                    if self._check_pause(f"Comment check for post {post_id}"):
                        result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
@@ -1007,10 +1030,10 @@ class PostProcessorWorker:
                    return result_tuple

            if not self.extract_links_only and self.manga_mode_active and current_character_filters and (self.char_filter_scope == CHAR_SCOPE_TITLE or self.char_filter_scope == CHAR_SCOPE_BOTH) and not post_is_candidate_by_title_char_match:
-               self.logger(f" -> Skip Post (Manga Mode with Title/Both Scope - No Title Char Match): Title '{post_title[:50]}' doesn't match filters.")
-               self._emit_signal('missed_character_post', post_title, "Manga Mode: No title match for character filter (Title/Both scope)")
-               result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
-               return result_tuple
                self.logger(f" -> Skip Post (Manga Mode with Title/Both Scope - No Title Char Match): Title '{post_title[:50]}' doesn't match filters.")
                self._emit_signal('missed_character_post', post_title, "Manga Mode: No title match for character filter (Title/Both scope)")
                result_tuple = (0, num_potential_files_in_post, [], [], [], None, None)
                return result_tuple

            if not isinstance(post_attachments, list):
                self.logger(f"⚠️ Corrupt attachment data for post {post_id} (expected list, got {type(post_attachments)}). Skipping attachments.")
@@ -1143,29 +1166,50 @@ class PostProcessorWorker:
-           suffix_counter = 0
-           final_post_subfolder_name = ""
-
-           while True:
            suffix_counter = 0
            folder_creation_successful = False
            final_post_subfolder_name = ""
            post_id_for_folder = str(self.post.get('id', 'unknown_id'))

            while not folder_creation_successful:
                if suffix_counter == 0:
                    name_candidate = original_cleaned_post_title_for_sub
                else:
                    name_candidate = f"{original_cleaned_post_title_for_sub}_{suffix_counter}"

                potential_post_subfolder_path = os.path.join(base_path_for_post_subfolder, name_candidate)
-               try:
-                   os.makedirs(potential_post_subfolder_path, exist_ok=False)
-                   final_post_subfolder_name = name_candidate
-                   if suffix_counter > 0:
-                       self.logger(f" Post subfolder name conflict: Using '{final_post_subfolder_name}' instead of '{original_cleaned_post_title_for_sub}' to avoid mixing posts.")
-                   break
-               except FileExistsError:
-                   suffix_counter += 1
-                   if suffix_counter > 100:
-                       self.logger(f" ⚠️ Exceeded 100 attempts to find unique subfolder name for '{original_cleaned_post_title_for_sub}'. Using UUID.")
-                       final_post_subfolder_name = f"{original_cleaned_post_title_for_sub}_{uuid.uuid4().hex[:8]}"
-                       os.makedirs(os.path.join(base_path_for_post_subfolder, final_post_subfolder_name), exist_ok=True)
                id_file_path = os.path.join(potential_post_subfolder_path, f".postid_{post_id_for_folder}")

                if not os.path.isdir(potential_post_subfolder_path):
                    # Folder does not exist: create it and its ID file.
                    try:
                        os.makedirs(potential_post_subfolder_path)
                        with open(id_file_path, 'w') as f:
                            f.write(post_id_for_folder)

                        final_post_subfolder_name = name_candidate
                        folder_creation_successful = True
                        if suffix_counter > 0:
                            self.logger(f" Post subfolder name conflict: Using '{final_post_subfolder_name}' to avoid mixing posts.")
                    except OSError as e_mkdir:
                        self.logger(f" ❌ Error creating directory '{potential_post_subfolder_path}': {e_mkdir}.")
                        final_post_subfolder_name = original_cleaned_post_title_for_sub
                        break
-               except OSError as e_mkdir:
-                   self.logger(f" ❌ Error creating directory '{potential_post_subfolder_path}': {e_mkdir}. Files for this post might be saved in parent or fail.")
-                   final_post_subfolder_name = original_cleaned_post_title_for_sub
-                   break
                else:
                    # Folder exists: check whether it belongs to this post or a different one.
                    if os.path.exists(id_file_path):
                        # ID file matches! This is a restore scenario. Reuse the folder.
                        self.logger(f" ℹ️ Re-using existing post subfolder: '{name_candidate}'")
                        final_post_subfolder_name = name_candidate
                        folder_creation_successful = True
                    else:
                        # Folder exists but the ID file does not match (or is missing). This is a normal name collision.
                        suffix_counter += 1
                        if suffix_counter > 100:  # Safety break
                            self.logger(f" ⚠️ Exceeded 100 attempts to find unique subfolder for '{original_cleaned_post_title_for_sub}'.")
                            final_post_subfolder_name = f"{original_cleaned_post_title_for_sub}_{uuid.uuid4().hex[:8]}"
                            os.makedirs(os.path.join(base_path_for_post_subfolder, final_post_subfolder_name), exist_ok=True)
                            break
            determined_post_save_path_for_history = os.path.join(base_path_for_post_subfolder, final_post_subfolder_name)

            if self.skip_words_list and (self.skip_words_scope == SKIP_SCOPE_POSTS or self.skip_words_scope == SKIP_SCOPE_BOTH):
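The `.postid_<id>` marker file is what lets the loop tell a resume of the same post (reuse the folder) apart from a name collision with a different post (add a suffix). A compact standalone sketch of the same idea, with hypothetical names and the per-call error handling elided:

import os
import uuid

def resolve_post_folder(base_dir, title, post_id, max_tries=100):
    """Reuse a folder whose .postid marker matches; otherwise suffix the name."""
    for n in range(max_tries):
        candidate = title if n == 0 else f"{title}_{n}"
        path = os.path.join(base_dir, candidate)
        marker = os.path.join(path, f".postid_{post_id}")
        if not os.path.isdir(path):
            os.makedirs(path)
            with open(marker, 'w') as f:  # claim the folder for this post
                f.write(str(post_id))
            return path
        if os.path.exists(marker):
            return path  # same post: restore/continue scenario
    # Collision storm: fall back to a UUID suffix.
    path = os.path.join(base_dir, f"{title}_{uuid.uuid4().hex[:8]}")
    os.makedirs(path, exist_ok=True)
    return path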
@@ -1807,14 +1851,23 @@ class PostProcessorWorker:
                    permanent_failures_this_post, history_data_for_this_post,
                    None)

        except Exception as main_thread_err:
            self.logger(f"\n❌ Critical error within Worker process for {log_prefix} {post_id}: {main_thread_err}")
            self.logger(traceback.format_exc())
            # Ensure we still return a valid tuple to prevent the app from stalling.
            result_tuple = (0, 1, [], [], [{'error': str(main_thread_err)}], None, None)
        finally:
            # This block ALWAYS executes, ensuring that every task signals its completion.
            # This is critical for the main thread to know when all work is done.
            if not self.extract_links_only and self.use_post_subfolders and total_downloaded_this_post == 0:
                path_to_check_for_emptiness = determined_post_save_path_for_history
                try:
                    # Check whether the path is a directory and whether it is empty.
                    if os.path.isdir(path_to_check_for_emptiness) and not os.listdir(path_to_check_for_emptiness):
                        self.logger(f" 🗑️ Removing empty post-specific subfolder: '{path_to_check_for_emptiness}'")
                        os.rmdir(path_to_check_for_emptiness)
                except OSError as e_rmdir:
                    # Log if removal fails for any reason (e.g., permissions).
                    self.logger(f" ⚠️ Could not remove potentially empty subfolder '{path_to_check_for_emptiness}': {e_rmdir}")

            self._emit_signal('worker_finished', result_tuple)
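The shape of this method matters more than its details: exactly one 'worker_finished' signal must be emitted per task, whether it succeeds, fails, or returns early, or the main thread's completion accounting never converges. A skeleton of that contract (illustrative only; `_do_work` is a hypothetical stand-in for the real body, and the seven-element tuple mirrors the one used throughout):

class WorkerSkeleton:
    """Illustrative only: the completion contract of PostProcessorWorker.process()."""
    def __init__(self, emit_signal):
        self._emit_signal = emit_signal  # e.g. a queue.put wrapper in the real app

    def process(self):
        # Default result so even an early crash yields a well-formed tuple.
        result_tuple = (0, 0, [], [], [], None, None)
        try:
            result_tuple = self._do_work()  # hypothetical stand-in
        except Exception as err:
            # Convert a crash into "1 skipped, 1 permanent failure" instead of losing the task.
            result_tuple = (0, 1, [], [], [{'error': str(err)}], None, None)
        finally:
            # Always runs: the GUI counts these events to know when all posts are done.
            self._emit_signal('worker_finished', result_tuple)
        return result_tuple

    def _do_work(self):
        raise NotImplementedError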
src/ui/dialogs/discord_pdf_generator.py | 146 (new file)

@@ -0,0 +1,146 @@
import os
import re
import datetime

try:
    from fpdf import FPDF
    FPDF_AVAILABLE = True

    class PDF(FPDF):
        """Custom PDF class for Discord chat logs."""
        def __init__(self, server_name, channel_name, *args, **kwargs):
            super().__init__(*args, **kwargs)
            self.server_name = server_name
            self.channel_name = channel_name
            self.default_font_family = 'DejaVu'  # Changed to Arial if the font fails to load

        def header(self):
            if self.page_no() == 1:
                return  # No header on the title page
            self.set_font(self.default_font_family, '', 8)
            self.cell(0, 10, f'{self.server_name} - #{self.channel_name}', 0, 0, 'L')
            self.cell(0, 10, 'Page ' + str(self.page_no()), 0, 0, 'R')
            self.ln(10)

        def footer(self):
            pass  # No footer needed; the header carries the page number

except ImportError:
    FPDF_AVAILABLE = False
    FPDF = None
    PDF = None

def create_pdf_from_discord_messages(messages_data, server_name, channel_name, output_filename, font_path, logger=print):
    """
    Creates a single PDF from a list of Discord message objects, formatted as a chat log.
    Includes clickable links for attachments and embeds.
    """
    if not FPDF_AVAILABLE:
        logger("❌ PDF creation failed: the 'fpdf2' library is not installed.")
        return False

    if not messages_data:
        logger(" No messages were found or fetched to create a PDF.")
        return False

    logger(" Sorting messages by date (oldest first)...")
    messages_data.sort(key=lambda m: m.get('published', ''))

    pdf = PDF(server_name, channel_name)
    default_font_family = 'DejaVu'

    try:
        bold_font_path = font_path.replace("DejaVuSans.ttf", "DejaVuSans-Bold.ttf")
        if not os.path.exists(font_path) or not os.path.exists(bold_font_path):
            raise RuntimeError("Font files not found")

        pdf.add_font('DejaVu', '', font_path, uni=True)
        pdf.add_font('DejaVu', 'B', bold_font_path, uni=True)
    except Exception as font_error:
        logger(f" ⚠️ Could not load DejaVu font: {font_error}. Falling back to Arial.")
        default_font_family = 'Arial'
        pdf.default_font_family = 'Arial'

    # --- Title Page ---
    pdf.add_page()
    pdf.set_font(default_font_family, 'B', 24)
    pdf.cell(w=0, h=20, text="Discord Chat Log", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.ln(10)
    pdf.set_font(default_font_family, '', 16)
    pdf.cell(w=0, h=10, text=f"Server: {server_name}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.cell(w=0, h=10, text=f"Channel: #{channel_name}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.ln(5)
    pdf.set_font(default_font_family, '', 10)
    pdf.cell(w=0, h=10, text=f"Generated on: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}", align='C', new_x="LMARGIN", new_y="NEXT")
    pdf.cell(w=0, h=10, text=f"Total Messages: {len(messages_data)}", align='C', new_x="LMARGIN", new_y="NEXT")

    pdf.add_page()

    logger(f" Starting PDF creation with {len(messages_data)} messages...")

    for i, message in enumerate(messages_data):
        author = message.get('author', {}).get('global_name') or message.get('author', {}).get('username', 'Unknown User')
        timestamp_str = message.get('published', '')
        content = message.get('content', '')
        attachments = message.get('attachments', [])
        embeds = message.get('embeds', [])

        try:
            # Handle timezone information correctly
            if timestamp_str.endswith('Z'):
                timestamp_str = timestamp_str[:-1] + '+00:00'
            dt_obj = datetime.datetime.fromisoformat(timestamp_str)
            formatted_timestamp = dt_obj.strftime('%Y-%m-%d %H:%M:%S')
        except (ValueError, TypeError):
            formatted_timestamp = timestamp_str

        # Draw a separator line between messages
        if i > 0:
            pdf.ln(2)
            pdf.set_draw_color(200, 200, 200)  # Light grey line
            pdf.cell(0, 0, '', border='T')
            pdf.ln(2)

        # Message header: author in bold, timestamp in grey
        pdf.set_font(default_font_family, 'B', 11)
        pdf.write(5, f"{author} ")
        pdf.set_font(default_font_family, '', 9)
        pdf.set_text_color(128, 128, 128)
        pdf.write(5, f"({formatted_timestamp})")
        pdf.set_text_color(0, 0, 0)
        pdf.ln(6)

        # Message content
        if content:
            pdf.set_font(default_font_family, '', 10)
            pdf.multi_cell(w=0, h=5, text=content)

        # --- START: ATTACHMENT AND EMBED LINK LOGIC ---
        if attachments or embeds:
            pdf.ln(1)
            pdf.set_font(default_font_family, '', 9)
            pdf.set_text_color(22, 119, 219)  # A nice blue for links

            for att in attachments:
                file_name = att.get('name', 'untitled')
                file_path = att.get('path', '')
                # Construct the full, clickable URL for the attachment
                full_url = f"https://kemono.cr/data{file_path}"
                pdf.write(5, text=f"[Attachment: {file_name}]", link=full_url)
                pdf.ln()  # New line after each attachment

            for embed in embeds:
                embed_url = embed.get('url', 'no url')
                # The embed URL is already a full URL
                pdf.write(5, text=f"[Embed: {embed_url}]", link=embed_url)
                pdf.ln()  # New line after each embed

            pdf.set_text_color(0, 0, 0)  # Reset color to black
        # --- END: ATTACHMENT AND EMBED LINK LOGIC ---

    try:
        pdf.output(output_filename)
        logger(f"✅ Successfully created Discord chat log PDF: '{os.path.basename(output_filename)}'")
        return True
    except Exception as e:
        logger(f"❌ A critical error occurred while saving the final PDF: {e}")
        return False
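The trailing-'Z' rewrite in the timestamp handling exists because datetime.fromisoformat() only accepts a literal 'Z' suffix from Python 3.11 onward; on older interpreters it raises ValueError. A small illustration (the exact timestamp format is an assumption about the API's 'published' field):

import datetime

ts = "2024-05-01T12:34:56.000000Z"  # assumed 'published' style; may vary

# On Python < 3.11 this raises ValueError because of the trailing 'Z':
#   datetime.datetime.fromisoformat(ts)

# Rewriting 'Z' as an explicit UTC offset parses on 3.7+ as well:
dt = datetime.datetime.fromisoformat(ts[:-1] + '+00:00')
print(dt.strftime('%Y-%m-%d %H:%M:%S'))  # 2024-05-01 12:34:56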
@@ -34,6 +34,7 @@ from ..core.workers import DownloadThread as BackendDownloadThread
from ..core.workers import PostProcessorWorker
from ..core.workers import PostProcessorSignals
from ..core.api_client import download_from_api
from ..core.discord_client import fetch_server_channels, fetch_channel_messages
from ..core.manager import DownloadManager
from .assets import get_app_icon_object
from ..config.constants import *
@@ -56,6 +57,7 @@ from .dialogs.FavoriteArtistsDialog import FavoriteArtistsDialog
from .dialogs.ConfirmAddAllDialog import ConfirmAddAllDialog
from .dialogs.MoreOptionsDialog import MoreOptionsDialog
from .dialogs.SinglePDF import create_single_pdf_from_content
from .dialogs.discord_pdf_generator import create_pdf_from_discord_messages
from .dialogs.SupportDialog import SupportDialog
from .dialogs.KeepDuplicatesDialog import KeepDuplicatesDialog
from .dialogs.MultipartScopeDialog import MultipartScopeDialog
@@ -241,6 +243,8 @@ class DownloaderApp (QWidget ):
        self.single_pdf_mode = False
        self.save_creator_json_enabled_this_session = True
        self.is_single_post_session = False
        self.discord_download_scope = 'files'


        print(f"ℹ️ Known.txt will be loaded/saved at: {self.config_file}")
@@ -621,6 +625,14 @@ class DownloaderApp (QWidget ):
        self.cancel_btn.setEnabled(False)
        self.cancel_btn.setToolTip(self._tr("cancel_button_tooltip", "Click to cancel the ongoing download/extraction process and reset the UI fields (preserving URL and Directory)."))

    def update_discord_button_visibility(self, text=""):
        if not hasattr(self, 'save_discord_as_pdf_btn'):
            return
        url_text = self.link_input.text().strip()
        service, _, _ = extract_post_info(url_text)
        is_discord = (service == 'discord')
        self.save_discord_as_pdf_btn.setVisible(is_discord)

    def _clear_update_selection(self):
        """Clears the loaded creator profile and fully resets the UI to its default state."""
        self.log_signal.emit("ℹ️ Update selection cleared. Resetting UI to defaults.")
@@ -791,6 +803,9 @@ class DownloaderApp (QWidget ):
        self .use_cookie_checkbox .toggled .connect (self ._update_cookie_input_visibility )
        if hasattr (self ,'link_input'):
            self .link_input .textChanged .connect (self ._sync_queue_with_link_input )
            self.link_input.textChanged.connect(self._update_contextual_ui_elements)
        if hasattr(self, 'discord_scope_toggle_button'):
            self.discord_scope_toggle_button.clicked.connect(self._cycle_discord_scope)
        if hasattr (self ,'cookie_browse_button'):
            self .cookie_browse_button .clicked .connect (self ._browse_cookie_file )
        if hasattr (self ,'cookie_text_input'):
@@ -954,6 +969,10 @@ class DownloaderApp (QWidget ):
                    self ._handle_file_successfully_downloaded (payload [0 ])
                elif signal_type == 'worker_finished':
                    self.actual_gui_signals.worker_finished_signal.emit(payload[0] if payload else tuple())
                elif signal_type == 'set_progress_label' and self.progress_label:
                    self.progress_label.setText(payload[0] if payload else "")
                elif signal_type == 'set_ui_enabled':
                    self.set_ui_enabled(payload[0] if payload else True)
                else:
                    self .log_signal .emit (f"⚠️ Unknown signal type from worker queue: {signal_type }")
                self .worker_to_gui_queue .task_done ()
@@ -1020,6 +1039,107 @@ class DownloaderApp (QWidget ):

        self .character_list .addItems ([entry ["name"]for entry in KNOWN_NAMES ])

    def start_discord_pdf_save(self):
        if self._is_download_active():
            QMessageBox.warning(self, "Busy", "Another operation is already in progress.")
            return

        api_url = self.link_input.text().strip()
        service, server_id, channel_id = extract_post_info(api_url)

        if service != 'discord':
            QMessageBox.critical(self, "Input Error", "This feature is only for Discord URLs.")
            return

        # --- Get Save Filename ---
        default_filename = f"discord_{server_id}_{channel_id or 'server'}.pdf"
        filepath, _ = QFileDialog.getSaveFileName(self, "Save Discord Log as PDF", default_filename, "PDF Files (*.pdf)")

        if not filepath:
            self.log_signal.emit("ℹ️ Discord PDF save cancelled by user.")
            return

        # --- Create and run the background thread ---
        pdf_thread = threading.Thread(
            target=self._run_discord_pdf_creation_thread,
            args=(api_url, server_id, channel_id, filepath),
            daemon=True
        )
        pdf_thread.start()

    def _run_discord_pdf_creation_thread(self, api_url, server_id, channel_id, output_filepath):
        # --- START: THREAD-SAFE LOGGERS ---
        def queue_logger(message):
            # Puts log messages into the thread-safe worker-to-GUI queue.
            self.worker_to_gui_queue.put({'type': 'progress', 'payload': (message,)})

        def queue_progress_label_update(message):
            # Updates the main progress label via the same queue.
            self.worker_to_gui_queue.put({'type': 'set_progress_label', 'payload': (message,)})
        # --- END: THREAD-SAFE LOGGERS ---

        self.set_ui_enabled(False)
        queue_logger("=" * 40)
        queue_logger(f"🚀 Starting Discord PDF export for: {api_url}")
        queue_progress_label_update("Fetching messages...")

        all_messages = []
        cookies = prepare_cookies_for_request(
            self.use_cookie_checkbox.isChecked(), self.cookie_text_input.text(),
            self.selected_cookie_filepath, self.app_base_dir, queue_logger  # use the safe logger
        )

        channels_to_process = []
        server_name_for_pdf = server_id

        if channel_id:
            channels_to_process.append({'id': channel_id, 'name': channel_id})
        else:
            channels = fetch_server_channels(server_id, queue_logger, cookies)  # use the safe logger
            if channels:
                channels_to_process = channels
                # Ideally the server name would come from an API; the ID is used instead.
                server_name_for_pdf = server_id
            else:
                queue_logger(f"❌ Could not find any channels for server {server_id}.")
                self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})
                return

        # Fetch messages for all required channels
        for i, channel in enumerate(channels_to_process):
            queue_progress_label_update(f"Fetching from channel {i+1}/{len(channels_to_process)}: #{channel.get('name', '')}")
            message_generator = fetch_channel_messages(channel['id'], queue_logger, self.cancellation_event, self.pause_event, cookies)  # use the safe logger
            for message_batch in message_generator:
                all_messages.extend(message_batch)

        queue_progress_label_update(f"Collected {len(all_messages)} total messages. Generating PDF...")

        # Determine font path
        if getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS'):
            base_path = sys._MEIPASS
        else:
            base_path = self.app_base_dir
        font_path = os.path.join(base_path, 'data', 'dejavu-sans', 'DejaVuSans.ttf')

        # Generate the PDF
        success = create_pdf_from_discord_messages(
            all_messages,
            server_name_for_pdf,
            channels_to_process[0].get('name', channel_id) if len(channels_to_process) == 1 else "All Channels",
            output_filepath,
            font_path,
            logger=queue_logger  # use the safe logger
        )

        if success:
            queue_progress_label_update("✅ PDF export complete!")
        else:
            queue_progress_label_update("❌ PDF export failed. Check log for details.")

        queue_logger("=" * 40)
        # Safely re-enable the UI from the main thread via the queue.
        self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})

    def save_known_names(self):
        """
        Saves the current list of known names (KNOWN_NAMES) to the config file.
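Both queue_logger and queue_progress_label_update follow the rule used throughout this commit: background threads never touch Qt widgets directly, they enqueue typed events that the GUI thread drains and dispatches. A stripped-down sketch of the pattern with hypothetical names (print calls stand in for the widget updates):

import queue
import threading
import time

gui_queue = queue.Queue()

def worker():
    # Background thread: only ever talks to the queue.
    gui_queue.put({'type': 'progress', 'payload': ("working...",)})
    gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})

def drain_gui_queue():
    # GUI thread: called periodically (e.g. from a QTimer) to apply events.
    while not gui_queue.empty():
        event = gui_queue.get()
        if event['type'] == 'progress':
            print(*event['payload'])                    # stand-in for the log widget
        elif event['type'] == 'set_ui_enabled':
            print("UI enabled:", event['payload'][0])   # stand-in for set_ui_enabled()
        gui_queue.task_done()

threading.Thread(target=worker, daemon=True).start()
time.sleep(0.1)   # give the worker a moment in this toy example
drain_gui_queue()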
@@ -1708,15 +1828,19 @@ class DownloaderApp (QWidget ):
        scrollbar .setValue (0 )

    def _is_download_active (self ):
-       single_thread_active =self .download_thread and self .download_thread .isRunning ()
        single_thread_active = False
        if self.download_thread:
            # Handle both QThread and standard threading.Thread
            if hasattr(self.download_thread, 'isRunning') and self.download_thread.isRunning():
                single_thread_active = True
            elif hasattr(self.download_thread, 'is_alive') and self.download_thread.is_alive():
                single_thread_active = True

        fetcher_active =hasattr (self ,'is_fetcher_thread_running')and self .is_fetcher_thread_running
        pool_has_active_tasks =self .thread_pool is not None and any (not f .done ()for f in self .active_futures if f is not None )
        retry_pool_active =hasattr (self ,'retry_thread_pool')and self .retry_thread_pool is not None and hasattr (self ,'active_retry_futures')and any (not f .done ()for f in self .active_retry_futures if f is not None )

        external_dl_thread_active =hasattr (self ,'external_link_download_thread')and self .external_link_download_thread is not None and self .external_link_download_thread .isRunning ()

-       return single_thread_active or fetcher_active or pool_has_active_tasks or retry_pool_active or external_dl_thread_active
        return single_thread_active or fetcher_active or pool_has_active_tasks or retry_pool_active or external_dl_thread_active

    def handle_external_link_signal (self ,post_title ,link_text ,link_url ,platform ,decryption_key ):
        link_data =(post_title ,link_text ,link_url ,platform ,decryption_key )
@@ -2706,11 +2830,28 @@ class DownloaderApp (QWidget ):
        self .favorite_mode_posts_button .setEnabled (False )

    def update_ui_for_manga_mode (self ,checked ):
        # --- START: NEW DISCORD UI LOGIC ---
        url_text =self .link_input .text ().strip ()if self .link_input else ""
        service, _, _ = extract_post_info(url_text)
        is_discord_url = (service == 'discord')

        if is_discord_url:
            # When a Discord URL is detected, disable incompatible options.
            if self.manga_mode_checkbox:
                self.manga_mode_checkbox.setEnabled(False)
                self.manga_mode_checkbox.setChecked(False)
            if self.page_range_label: self.page_range_label.setEnabled(False)
            if self.start_page_input: self.start_page_input.setEnabled(False)
            if self.to_label: self.to_label.setEnabled(False)
            if self.end_page_input: self.end_page_input.setEnabled(False)
            checked = False  # Force manga mode off
        # --- END: NEW DISCORD UI LOGIC ---

        is_only_links_mode =self .radio_only_links and self .radio_only_links .isChecked ()
        is_only_archives_mode =self .radio_only_archives and self .radio_only_archives .isChecked ()
        is_only_audio_mode =hasattr (self ,'radio_only_audio')and self .radio_only_audio .isChecked ()

        url_text =self .link_input .text ().strip ()if self .link_input else ""
        # The rest of the original function continues from here...
        _ ,_ ,post_id =extract_post_info (url_text )

        is_creator_feed =not post_id if url_text else False
@@ -2720,7 +2861,8 @@ class DownloaderApp (QWidget ):
        if self.favorite_download_queue and all(item.get('type') == 'single_post_from_popup' for item in self.favorite_download_queue):
            is_single_post = True

-       can_enable_manga_checkbox = (is_creator_feed or is_single_post) and not is_favorite_mode_on
        # --- MODIFIED: Added check for is_discord_url ---
        can_enable_manga_checkbox = (is_creator_feed or is_single_post) and not is_favorite_mode_on and not is_discord_url

        if self .manga_mode_checkbox :
            self .manga_mode_checkbox .setEnabled (can_enable_manga_checkbox)
|
||||
if self .manga_rename_toggle_button :
|
||||
self .manga_rename_toggle_button .setVisible (manga_mode_effectively_on and not (is_only_links_mode or is_only_archives_mode or is_only_audio_mode ))
|
||||
|
||||
self .update_page_range_enabled_state ()
|
||||
# --- MODIFIED: Added check for is_discord_url ---
|
||||
if not is_discord_url:
|
||||
self .update_page_range_enabled_state ()
|
||||
|
||||
current_filename_style =self .manga_filename_style
|
||||
|
||||
@@ -2773,7 +2917,6 @@ class DownloaderApp (QWidget ):

        self ._update_multithreading_for_date_mode ()

    def filter_character_list (self ,search_text ):
        search_text_lower =search_text .lower ()
        for i in range (self .character_list .count ()):
@@ -2841,7 +2984,35 @@ class DownloaderApp (QWidget ):
        if total_posts >0 or processed_posts >0 :
            self .file_progress_label .setText ("")

    def _update_contextual_ui_elements(self, text=""):
        """Shows or hides UI elements based on the URL, like the Discord scope button."""
        if not hasattr(self, 'discord_scope_toggle_button'): return
        url_text = self.link_input.text().strip()
        service, _, _ = extract_post_info(url_text)
        is_discord = (service == 'discord')
        self.discord_scope_toggle_button.setVisible(is_discord)
        if is_discord: self._update_discord_scope_button_text()
        else: self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))

    def _update_discord_scope_button_text(self):
        """Updates the text of the Discord scope button and the main download button."""
        if self.discord_download_scope == 'files':
            self.discord_scope_toggle_button.setText("Scope: Files")
            self.download_btn.setText(self._tr("start_download_button_text", "⬇️ Start Download"))
        else:
            self.discord_scope_toggle_button.setText("Scope: Messages")
            self.download_btn.setText("📄 Save Messages as PDF")

    def _cycle_discord_scope(self):
        """Toggles between 'files' and 'messages' for Discord downloads."""
        self.discord_download_scope = 'messages' if self.discord_download_scope == 'files' else 'files'
        self._update_discord_scope_button_text()

    def start_download(self, direct_api_url=None, override_output_dir=None, is_restore=False, is_continuation=False, item_type_from_queue=None):
        # --- NEW: Import clean_folder_name here ---
        from ..utils.file_utils import clean_folder_name
        from ..config.constants import FOLDER_NAME_STOP_WORDS

        self.finish_lock = threading.Lock()
        self.is_finishing = False
        if self.active_update_profile:
@@ -2852,7 +3023,7 @@ class DownloaderApp (QWidget ):

        self.is_finishing = False
        self.downloaded_hash_counts.clear()
-       global KNOWN_NAMES, BackendDownloadThread, PostProcessorWorker, extract_post_info, clean_folder_name, MAX_FILE_THREADS_PER_POST_OR_WORKER
        global KNOWN_NAMES, BackendDownloadThread, PostProcessorWorker, extract_post_info, MAX_FILE_THREADS_PER_POST_OR_WORKER

        if not is_restore and not is_continuation:
            self.permanently_failed_files_for_dialog.clear()
@@ -2968,8 +3139,146 @@ class DownloaderApp (QWidget ):

        self.cancellation_message_logged_this_session = False

-       service, user_id, post_id_from_url = extract_post_info(api_url)
        # --- MODIFIED: ID names are now generic ---
        service, id1, id2 = extract_post_info(api_url)

        if not service or not id1:
            QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
            return False

        # --- START: NEW DISCORD LOGIC BRANCH ---
        if service == 'discord':
            server_id, channel_id = id1, id2

            def discord_processing_task():
                def queue_logger(message):
                    self.worker_to_gui_queue.put({'type': 'progress', 'payload': (message,)})

                def queue_progress_label_update(message):
                    self.worker_to_gui_queue.put({'type': 'set_progress_label', 'payload': (message,)})

                cookies = prepare_cookies_for_request(
                    self.use_cookie_checkbox.isChecked(), self.cookie_text_input.text(),
                    self.selected_cookie_filepath, self.app_base_dir, queue_logger
                )

                # --- SCOPE: MESSAGES (PDF CREATION) ---
                if self.discord_download_scope == 'messages':
                    queue_logger("=" * 40)
                    queue_logger(f"🚀 Starting Discord PDF export for: {api_url}")

                    output_dir = self.dir_input.text().strip()
                    if not output_dir or not os.path.isdir(output_dir):
                        queue_logger("❌ PDF Save Error: No valid download directory selected in the UI.")
                        self.worker_to_gui_queue.put({'type': 'set_ui_enabled', 'payload': (True,)})
                        return

                    default_filename = f"discord_{server_id}_{channel_id or 'server'}.pdf"
                    output_filepath = os.path.join(output_dir, default_filename)  # Saved under a default name

                    all_messages, channels_to_process = [], []
                    server_name_for_pdf = server_id

                    if channel_id:
                        channels_to_process.append({'id': channel_id, 'name': channel_id})
                    else:
                        channels = fetch_server_channels(server_id, queue_logger, cookies)
                        if channels:
                            channels_to_process = channels

                    for i, channel in enumerate(channels_to_process):
                        queue_progress_label_update(f"Fetching from channel {i+1}/{len(channels_to_process)}: #{channel.get('name', '')}")
                        message_generator = fetch_channel_messages(channel['id'], queue_logger, self.cancellation_event, self.pause_event, cookies)
                        for message_batch in message_generator:
                            all_messages.extend(message_batch)

                    queue_progress_label_update(f"Collected {len(all_messages)} total messages. Generating PDF...")

                    if getattr(sys, 'frozen', False):
                        base_path = sys._MEIPASS
                    else:
                        base_path = self.app_base_dir
                    font_path = os.path.join(base_path, 'data', 'dejavu-sans', 'DejaVuSans.ttf')

                    success = create_pdf_from_discord_messages(
                        all_messages, server_name_for_pdf,
                        channels_to_process[0].get('name', channel_id) if len(channels_to_process) == 1 else "All Channels",
                        output_filepath, font_path, logger=queue_logger
                    )

                    if success:
                        queue_progress_label_update("✅ PDF export complete!")
                    else:
                        queue_progress_label_update("❌ PDF export failed.")
                    self.finished_signal.emit(0, len(all_messages), self.cancellation_event.is_set(), [])
                    return

                # --- SCOPE: FILES (DOWNLOAD) ---
                elif self.discord_download_scope == 'files':
                    worker_args = {
                        'download_root': effective_output_dir_for_run, 'known_names': list(KNOWN_NAMES),
                        'filter_character_list': self._parse_character_filters(self.character_input.text().strip()),
                        'emitter': self.worker_to_gui_queue, 'unwanted_keywords': FOLDER_NAME_STOP_WORDS,
                        'filter_mode': self.get_filter_mode(), 'skip_zip': self.skip_zip_checkbox.isChecked(),
                        'use_subfolders': self.use_subfolders_checkbox.isChecked(), 'use_post_subfolders': self.use_subfolder_per_post_checkbox.isChecked(),
                        'target_post_id_from_initial_url': None, 'custom_folder_name': None,
                        'compress_images': self.compress_images_checkbox.isChecked(), 'download_thumbnails': self.download_thumbnails_checkbox.isChecked(),
                        'service': service, 'user_id': server_id, 'api_url_input': api_url,
                        'pause_event': self.pause_event, 'cancellation_event': self.cancellation_event,
                        'downloaded_files': self.downloaded_files, 'downloaded_file_hashes': self.downloaded_file_hashes,
                        'downloaded_files_lock': self.downloaded_files_lock, 'downloaded_file_hashes_lock': self.downloaded_file_hashes_lock,
                        'skip_words_list': [word.strip().lower() for word in self.skip_words_input.text().strip().split(',') if word.strip()],
                        'skip_words_scope': self.get_skip_words_scope(), 'char_filter_scope': self.get_char_filter_scope(),
                        'remove_from_filename_words_list': [word.strip() for word in self.remove_from_filename_input.text().strip().split(',') if word.strip()],
                        'scan_content_for_images': self.scan_content_images_checkbox.isChecked(),
                        'manga_mode_active': False,
                    }
                    total_dl, total_skip = 0, 0

                    def process_channel_files(channel_id_to_process, output_directory):
                        nonlocal total_dl, total_skip
                        message_generator = fetch_channel_messages(channel_id_to_process, queue_logger, self.cancellation_event, self.pause_event, cookies)
                        for message_batch in message_generator:
                            if self.cancellation_event.is_set():
                                break
                            for message in message_batch:
                                if self.cancellation_event.is_set():
                                    break
                                if not message.get('attachments'):
                                    continue

                                worker_instance_args = worker_args.copy()
                                worker_instance_args.update({'post_data': message, 'download_root': output_directory, 'override_output_dir': output_directory})
                                worker = PostProcessorWorker(**worker_instance_args)
                                dl_count, skip_count, _, _, _, _, _ = worker.process()
                                total_dl += dl_count
                                total_skip += skip_count

                    if channel_id:
                        process_channel_files(channel_id, effective_output_dir_for_run)
                    else:
                        channels = fetch_server_channels(server_id, queue_logger, cookies)
                        if channels:
                            for i, channel in enumerate(channels):
                                if self.cancellation_event.is_set():
                                    break
                                chan_id = channel.get('id')
                                chan_name = channel.get('name', f"channel_{chan_id}")
                                queue_logger("=" * 40)
                                queue_logger(f"Processing Channel {i+1}/{len(channels)}: '{chan_name}'")
                                channel_dir = os.path.join(effective_output_dir_for_run, clean_folder_name(chan_name))
                                os.makedirs(channel_dir, exist_ok=True)
                                process_channel_files(chan_id, channel_dir)

                    self.finished_signal.emit(total_dl, total_skip, self.cancellation_event.is_set(), [])

            self.set_ui_enabled(False)
            self.download_thread = threading.Thread(target=discord_processing_task, daemon=True)
            self.download_thread.start()
            return True

        user_id, post_id_from_url = id1, id2

        if direct_api_url and not post_id_from_url and item_type_from_queue and 'post' in item_type_from_queue:
            self.log_signal.emit(f"❌ CRITICAL ERROR: Could not parse post ID from the queued POST URL: {api_url}")
            self.log_signal.emit(" Skipping this item. This might be due to an unsupported URL format or a temporary issue.")
@@ -2981,10 +3290,6 @@ class DownloaderApp (QWidget ):
            )
            return False

-       if not service or not user_id:
-           QMessageBox.critical(self, "Input Error", "Invalid or unsupported URL format.")
-           return False
-
        self.save_creator_json_enabled_this_session = self.settings.value(SAVE_CREATOR_JSON_KEY, True, type=bool)
        self.is_single_post_session = bool(post_id_from_url)
@@ -3028,8 +3333,6 @@ class DownloaderApp (QWidget ):

        profile_processed_ids = set()

        session_processed_ids = set(processed_post_ids_for_restore)
        combined_processed_ids = session_processed_ids.union(profile_processed_ids)
        processed_post_ids_for_this_run = list(combined_processed_ids)
@@ -3924,9 +4227,11 @@ class DownloaderApp (QWidget ):
        self.permanently_failed_files_for_dialog.extend(permanent)
        self._update_error_button_count()

-       if history_data:
        # This single call now correctly handles both history and profile saving.
        if history_data and not permanent:
            self._add_to_history_candidates(history_data)
        elif history_data and permanent:
            post_id = history_data.get('post_id', 'N/A')
            self.log_signal.emit(f"⚠️ Post {post_id} had permanent file failures. It will NOT be marked as processed and will be retried on the next session/update.")

        self.overall_progress_signal.emit(self.total_posts_to_process, self.processed_posts_count)
@@ -4298,7 +4603,8 @@ class DownloaderApp (QWidget ):

        self.progress_label.setText(self._tr("status_cancelling", "Cancelling... Please wait."))

-       if self.download_thread and self.download_thread.isRunning():
        # Only call QThread-specific methods if the thread is a QThread
        if self.download_thread and hasattr(self.download_thread, 'requestInterruption'):
            self.download_thread.requestInterruption()
            self.log_signal.emit(" Signaled single download thread to interrupt.")
@@ -4402,23 +4708,25 @@ class DownloaderApp (QWidget ):
        self.log_signal.emit("=" * 40)

-       if self.download_thread:
-           try:
-               if hasattr(self.download_thread, 'progress_signal'): self.download_thread.progress_signal.disconnect(self.handle_main_log)
-               if hasattr(self.download_thread, 'add_character_prompt_signal'): self.download_thread.add_character_prompt_signal.disconnect(self.add_character_prompt_signal)
-               if hasattr(self.download_thread, 'finished_signal'): self.download_thread.finished_signal.disconnect(self.download_finished)
-               if hasattr(self.download_thread, 'receive_add_character_result'): self.character_prompt_response_signal.disconnect(self.download_thread.receive_add_character_result)
-               if hasattr(self.download_thread, 'external_link_signal'): self.download_thread.external_link_signal.disconnect(self.handle_external_link_signal)
-               if hasattr(self.download_thread, 'file_progress_signal'): self.download_thread.file_progress_signal.disconnect(self.update_file_progress_display)
-               if hasattr(self.download_thread, 'missed_character_post_signal'): self.download_thread.missed_character_post_signal.disconnect(self.handle_missed_character_post)
-               if hasattr(self.download_thread, 'retryable_file_failed_signal'): self.download_thread.retryable_file_failed_signal.disconnect(self._handle_retryable_file_failure)
-               if hasattr(self.download_thread, 'file_successfully_downloaded_signal'): self.download_thread.file_successfully_downloaded_signal.disconnect(self._handle_actual_file_downloaded)
-               if hasattr(self.download_thread, 'post_processed_for_history_signal'): self.download_thread.post_processed_for_history_signal.disconnect(self._add_to_history_candidates)
-           except (TypeError, RuntimeError) as e:
-               self.log_signal.emit(f"ℹ️ Note during single-thread signal disconnection: {e}")
        if isinstance(self.download_thread, QThread):
            try:
                if hasattr(self.download_thread, 'progress_signal'): self.download_thread.progress_signal.disconnect(self.handle_main_log)
                if hasattr(self.download_thread, 'add_character_prompt_signal'): self.download_thread.add_character_prompt_signal.disconnect(self.add_character_prompt_signal)
                if hasattr(self.download_thread, 'finished_signal'): self.download_thread.finished_signal.disconnect(self.download_finished)
                if hasattr(self.download_thread, 'receive_add_character_result'): self.character_prompt_response_signal.disconnect(self.download_thread.receive_add_character_result)
                if hasattr(self.download_thread, 'external_link_signal'): self.download_thread.external_link_signal.disconnect(self.handle_external_link_signal)
                if hasattr(self.download_thread, 'file_progress_signal'): self.download_thread.file_progress_signal.disconnect(self.update_file_progress_display)
                if hasattr(self.download_thread, 'missed_character_post_signal'): self.download_thread.missed_character_post_signal.disconnect(self.handle_missed_character_post)
                if hasattr(self.download_thread, 'retryable_file_failed_signal'): self.download_thread.retryable_file_failed_signal.disconnect(self._handle_retryable_file_failure)
                if hasattr(self.download_thread, 'file_successfully_downloaded_signal'): self.download_thread.file_successfully_downloaded_signal.disconnect(self._handle_actual_file_downloaded)
                if hasattr(self.download_thread, 'post_processed_for_history_signal'): self.download_thread.post_processed_for_history_signal.disconnect(self._add_to_history_candidates)
            except (TypeError, RuntimeError) as e:
                self.log_signal.emit(f"ℹ️ Note during single-thread signal disconnection: {e}")

-       if not self.download_thread.isRunning():
        if self.download_thread:
            if not self.download_thread.isRunning():
                self.download_thread.deleteLater()
                self.download_thread = None
            else:
                self.download_thread = None

        self.progress_label.setText(
@@ -4435,10 +4743,10 @@ class DownloaderApp (QWidget ):
            "Would you like to attempt to download these failed files again?",
            QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
        if reply == QMessageBox.Yes:
-           self.is_finishing = False  # Allow retry session to start
-           self.finish_lock.release()  # Release lock for the retry session
            self.is_finishing = False
            self.finish_lock.release()
            self._start_failed_files_retry_session()
-           return  # Exit to allow retry session to run
            return
        else:
            self.log_signal.emit("ℹ️ User chose not to retry failed files.")
            self.permanently_failed_files_for_dialog.extend(self.retryable_failed_files_info)
@@ -4737,7 +5045,7 @@ class DownloaderApp (QWidget ):
        self.is_paused = False

        self._clear_session_file()
-       self._reset_ui_to_defaults()
        self._perform_soft_ui_reset()
        self._load_saved_download_location()
        self.main_log_output.clear()
        self.external_log_output.clear()
@@ -141,12 +141,15 @@ def prepare_cookies_for_request(use_cookie_flag, cookie_text_input, selected_coo
def extract_post_info(url_string):
    """
    Parses a URL string to extract the service, user ID, and post ID.
    UPDATED to support Discord server/channel URLs.

    Args:
        url_string (str): The URL to parse.

    Returns:
-       tuple: A tuple containing (service, user_id, post_id). Any can be None.
        tuple: A tuple containing (service, id1, id2).
            For posts: (service, user_id, post_id).
            For Discord: ('discord', server_id, channel_id).
    """
    if not isinstance(url_string, str) or not url_string.strip():
        return None, None, None
@@ -155,7 +158,15 @@ def extract_post_info(url_string):
    parsed_url = urlparse(url_string.strip())
    path_parts = [part for part in parsed_url.path.strip('/').split('/') if part]

-   # Standard format: /<service>/user/<user_id>/post/<post_id>
    # Check for the new Discord URL format first,
    # e.g. /discord/server/891670433978531850/1252332668805189723
    if len(path_parts) >= 3 and path_parts[0].lower() == 'discord' and path_parts[1].lower() == 'server':
        service = 'discord'
        server_id = path_parts[2]
        channel_id = path_parts[3] if len(path_parts) >= 4 else None
        return service, server_id, channel_id

    # Standard creator/post format: /<service>/user/<user_id>/post/<post_id>
    if len(path_parts) >= 3 and path_parts[1].lower() == 'user':
        service = path_parts[0]
        user_id = path_parts[2]
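For illustration, the two branches yield tuples like the following (the Discord IDs are taken from the comment above; the creator and post IDs are made up, and the creator-post result assumes the rest of the function returns the post ID as its docstring states):

# Discord server URL (no channel): channel_id comes back as None.
extract_post_info("https://kemono.cr/discord/server/891670433978531850")
# -> ('discord', '891670433978531850', None)

# Discord channel URL:
extract_post_info("https://kemono.cr/discord/server/891670433978531850/1252332668805189723")
# -> ('discord', '891670433978531850', '1252332668805189723')

# Standard creator post URL keeps the old meaning of the tuple:
extract_post_info("https://kemono.cr/patreon/user/12345/post/67890")
# -> ('patreon', '12345', '67890')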
@@ -174,7 +185,6 @@ def extract_post_info(url_string):

    return None, None, None


def get_link_platform(url):
    """
    Identifies the platform of a given URL based on its domain.
@@ -391,6 +391,10 @@ def setup_ui(main_app):
    main_app.link_search_button.setVisible(False)
    main_app.link_search_button.setFixedWidth(int(30 * scale))
    log_title_layout.addWidget(main_app.link_search_button)
    main_app.discord_scope_toggle_button = QPushButton("Scope: Files")
    main_app.discord_scope_toggle_button.setVisible(False)  # Hidden by default
    main_app.discord_scope_toggle_button.setFixedWidth(int(140 * scale))
    log_title_layout.addWidget(main_app.discord_scope_toggle_button)
    main_app.manga_rename_toggle_button = QPushButton()
    main_app.manga_rename_toggle_button.setVisible(False)
    main_app.manga_rename_toggle_button.setFixedWidth(int(140 * scale))